From 2c74008f6101062f2f25c978a1d75eec5a842359 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 26 Jan 2022 12:37:56 +0200 Subject: [PATCH] Fixed RE of indexes and functions from PostgreSQL:10 --- reverse_engineering/api.js | 420 +++++------ .../helpers/connectionHelper.js | 214 +++--- reverse_engineering/helpers/db.js | 78 +- reverse_engineering/helpers/getJsonSchema.js | 40 +- reverse_engineering/helpers/loggerHelper.js | 90 +-- .../helpers/postgresHelpers/columnHelper.js | 408 +++++----- .../helpers/postgresHelpers/common.js | 21 +- .../postgresHelpers/foreignKeysHelper.js | 30 +- .../helpers/postgresHelpers/functionHelper.js | 128 ++-- .../helpers/postgresHelpers/tableHelper.js | 560 +++++++------- .../postgresHelpers/userDefinedTypesHelper.js | 168 ++--- .../helpers/postgresHelpers/viewHelper.js | 56 +- .../helpers/postgresService.js | 704 +++++++++--------- reverse_engineering/helpers/queryConstants.js | 216 +++--- 14 files changed, 1596 insertions(+), 1537 deletions(-) diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 20b34d4..48c6c85 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -4,223 +4,223 @@ const { createLogger, getSystemInfo } = require('./helpers/loggerHelper'); const postgresService = require('./helpers/postgresService'); module.exports = { - async disconnect(connectionInfo, logger, callback, app) { - await postgresService.disconnect(); - - callback(); - }, - - async testConnection(connectionInfo, logger, callback, app) { - try { - logInfo('Test connection', connectionInfo, logger); - - const postgresLogger = createLogger({ - title: 'Test connection instance log', - hiddenKeys: connectionInfo.hiddenKeys, - logger, - }); - - postgresService.setDependencies(app); - await postgresService.connect(connectionInfo, postgresLogger); - await postgresService.pingDb(); - await postgresService.logVersion(); - callback(); - } catch (error) { - logger.log('error', prepareError(error), 'Test connection instance log'); - callback(prepareError(error)); - } finally { - await postgresService.disconnect(); - } - }, - - async getDatabases(connectionInfo, logger, cb, app) { - try { - logInfo('Get databases', connectionInfo, logger); - - const postgresLogger = createLogger({ - title: 'Get DB names', - hiddenKeys: connectionInfo.hiddenKeys, - logger, - }); - - postgresService.setDependencies(app); - await postgresService.connect(connectionInfo, postgresLogger); - await postgresService.logVersion(); - - const dbs = await postgresService.getDatabaseNames(); - logger.log('info', dbs, 'All databases list', connectionInfo.hiddenKeys); - return cb(null, dbs); - } catch (err) { - logger.log('error', err); - return cb(prepareError(err)); - } - }, - - getDocumentKinds: function (connectionInfo, logger, cb) { - cb(null, []); - }, - - async getDbCollectionsNames(connectionInfo, logger, callback, app) { - try { - logInfo('Get DB table names', connectionInfo, logger); - - const postgresLogger = createLogger({ - title: 'Get DB collections names', - hiddenKeys: connectionInfo.hiddenKeys, - logger, - }); - - postgresService.setDependencies(app); - await postgresService.connect(connectionInfo, postgresLogger); - await postgresService.logVersion(); - const schemasNames = await postgresService.getAllSchemasNames(); - - const collections = await schemasNames.reduce(async (next, dbName) => { - const result = await next; - try { - const dbCollections = await postgresService.getTablesNames(dbName); - - return result.concat({ - dbName, - dbCollections, - 
isEmpty: dbCollections.length === 0, - }); - } catch (error) { - postgresLogger.info(`Error reading database "${dbName}"`); - postgresLogger.error(error); - - return result.concat({ - dbName, - dbCollections: [], - isEmpty: true, - status: true, - }); - } - }, Promise.resolve([])); - - callback(null, collections); - } catch (error) { - logger.log('error', prepareError(error), 'Get DB collections names'); - callback(prepareError(error)); - await postgresService.disconnect(); - } - }, - - async getDbCollectionsData(data, logger, callback, app) { - try { - logger.log('info', data, 'Retrieve tables data:', data.hiddenKeys); - - const postgresLogger = createLogger({ - title: 'Get DB collections data log', - hiddenKeys: data.hiddenKeys, - logger, - }); - - postgresLogger.progress('Start reverse engineering...'); - - const collections = data.collectionData.collections; - const schemasNames = data.collectionData.dataBaseNames; - - const modelData = await postgresService.getDbLevelData(); - - const { packages, relationships } = await Promise.all( - schemasNames.map(async schemaName => { - const { tables, views, modelDefinitions } = await postgresService.retrieveEntitiesData( - schemaName, - collections[schemaName], - data.recordSamplingSettings - ); - const { functions, procedures } = await postgresService.retrieveFunctionsWithProcedures(schemaName); - - postgresLogger.progress('Schema reversed successfully', schemaName); - - return { - schemaName, - tables, - views, - functions, - procedures, - modelDefinitions, - }; - }) - ) - .then(schemaData => { - const relationships = schemaData - .flatMap(({ tables }) => tables.map(entityData => entityData.relationships)) - .flat(); - - const packages = schemaData.flatMap( - ({ schemaName, tables, views, functions, procedures, modelDefinitions }) => { - const bucketInfo = { - UDFs: functions, - Procedures: procedures, - }; - - const tablePackages = tables - .map(entityData => ({ - dbName: schemaName, - collectionName: entityData.name, - documents: entityData.documents, - views: [], - emptyBucket: false, - entityLevel: entityData.entityLevel, - validation: { - jsonSchema: entityData.jsonSchema, - }, - bucketInfo, - modelDefinitions, - })) - .sort(data => (app.require('lodash').isEmpty(data.entityLevel.inherits) ? 
-1 : 1)); - - if (views?.length) { - const viewPackage = { - dbName: schemaName, - views: views, - emptyBucket: false, - }; - - return [...tablePackages, viewPackage]; - } - - return tablePackages; - } - ); - return { packages, relationships }; - }) - .then(({ packages, relationships }) => ({ packages: orderPackages(packages), relationships })); - - callback(null, packages, modelData, relationships); - } catch (error) { - logger.log('error', prepareError(error), 'Retrieve tables data'); - callback(prepareError(error)); - } finally { - await postgresService.disconnect(); - } - }, + async disconnect(connectionInfo, logger, callback, app) { + await postgresService.disconnect(); + + callback(); + }, + + async testConnection(connectionInfo, logger, callback, app) { + try { + logInfo('Test connection', connectionInfo, logger); + + const postgresLogger = createLogger({ + title: 'Test connection instance log', + hiddenKeys: connectionInfo.hiddenKeys, + logger, + }); + + postgresService.setDependencies(app); + await postgresService.connect(connectionInfo, postgresLogger); + await postgresService.pingDb(); + await postgresService.logVersion(); + callback(); + } catch (error) { + logger.log('error', prepareError(error), 'Test connection instance log'); + callback(prepareError(error)); + } finally { + await postgresService.disconnect(); + } + }, + + async getDatabases(connectionInfo, logger, cb, app) { + try { + logInfo('Get databases', connectionInfo, logger); + + const postgresLogger = createLogger({ + title: 'Get DB names', + hiddenKeys: connectionInfo.hiddenKeys, + logger, + }); + + postgresService.setDependencies(app); + await postgresService.connect(connectionInfo, postgresLogger); + await postgresService.logVersion(); + + const dbs = await postgresService.getDatabaseNames(); + logger.log('info', dbs, 'All databases list', connectionInfo.hiddenKeys); + return cb(null, dbs); + } catch (err) { + logger.log('error', err); + return cb(prepareError(err)); + } + }, + + getDocumentKinds: function (connectionInfo, logger, cb) { + cb(null, []); + }, + + async getDbCollectionsNames(connectionInfo, logger, callback, app) { + try { + logInfo('Get DB table names', connectionInfo, logger); + + const postgresLogger = createLogger({ + title: 'Get DB collections names', + hiddenKeys: connectionInfo.hiddenKeys, + logger, + }); + + postgresService.setDependencies(app); + await postgresService.connect(connectionInfo, postgresLogger); + await postgresService.logVersion(); + const schemasNames = await postgresService.getAllSchemasNames(); + + const collections = await schemasNames.reduce(async (next, dbName) => { + const result = await next; + try { + const dbCollections = await postgresService.getTablesNames(dbName); + + return result.concat({ + dbName, + dbCollections, + isEmpty: dbCollections.length === 0, + }); + } catch (error) { + postgresLogger.info(`Error reading database "${dbName}"`); + postgresLogger.error(error); + + return result.concat({ + dbName, + dbCollections: [], + isEmpty: true, + status: true, + }); + } + }, Promise.resolve([])); + + callback(null, collections); + } catch (error) { + logger.log('error', prepareError(error), 'Get DB collections names'); + callback(prepareError(error)); + await postgresService.disconnect(); + } + }, + + async getDbCollectionsData(data, logger, callback, app) { + try { + logger.log('info', data, 'Retrieve tables data:', data.hiddenKeys); + + const postgresLogger = createLogger({ + title: 'Get DB collections data log', + hiddenKeys: data.hiddenKeys, + logger, + }); + 
+ postgresLogger.progress('Start reverse engineering...');
+
+ const collections = data.collectionData.collections;
+ const schemasNames = data.collectionData.dataBaseNames;
+
+ const modelData = await postgresService.getDbLevelData();
+
+ const { packages, relationships } = await Promise.all(
+ schemasNames.map(async schemaName => {
+ const { tables, views, modelDefinitions } = await postgresService.retrieveEntitiesData(
+ schemaName,
+ collections[schemaName],
+ data.recordSamplingSettings,
+ );
+ const { functions, procedures } = await postgresService.retrieveFunctionsWithProcedures(schemaName);
+
+ postgresLogger.progress('Schema reversed successfully', schemaName);
+
+ return {
+ schemaName,
+ tables,
+ views,
+ functions,
+ procedures,
+ modelDefinitions,
+ };
+ }),
+ )
+ .then(schemaData => {
+ const relationships = schemaData
+ .flatMap(({ tables }) => tables.map(entityData => entityData.relationships))
+ .flat();
+
+ const packages = schemaData.flatMap(
+ ({ schemaName, tables, views, functions, procedures, modelDefinitions }) => {
+ const bucketInfo = {
+ UDFs: functions,
+ Procedures: procedures,
+ };
+
+ const tablePackages = tables
+ .map(entityData => ({
+ dbName: schemaName,
+ collectionName: entityData.name,
+ documents: entityData.documents,
+ views: [],
+ emptyBucket: false,
+ entityLevel: entityData.entityLevel,
+ validation: {
+ jsonSchema: entityData.jsonSchema,
+ },
+ bucketInfo,
+ modelDefinitions,
+ }))
+ .sort(data => (app.require('lodash').isEmpty(data.entityLevel.inherits) ? -1 : 1));
+
+ if (views?.length) {
+ const viewPackage = {
+ dbName: schemaName,
+ views: views,
+ emptyBucket: false,
+ };
+
+ return [...tablePackages, viewPackage];
+ }
+
+ return tablePackages;
+ },
+ );
+ return { packages, relationships };
+ })
+ .then(({ packages, relationships }) => ({ packages: orderPackages(packages), relationships }));
+
+ callback(null, packages, modelData, relationships);
+ } catch (error) {
+ logger.log('error', prepareError(error), 'Retrieve tables data');
+ callback(prepareError(error));
+ } finally {
+ await postgresService.disconnect();
+ }
+ },
};
const prepareError = error => {
- error = JSON.stringify(error, Object.getOwnPropertyNames(error));
- error = JSON.parse(error);
- return error;
+ error = JSON.stringify(error, Object.getOwnPropertyNames(error));
+ error = JSON.parse(error);
+ return error;
};
const logInfo = (step, connectionInfo, logger) => {
- logger.clear();
- logger.log('info', getSystemInfo(connectionInfo.appVersion), step);
- logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys);
+ logger.clear();
+ logger.log('info', getSystemInfo(connectionInfo.appVersion), step);
+ logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys);
};
const orderPackages = packages => {
- return packages.sort((packA, packB) => {
- if (!packA.collectionName && !packB.collectionName) {
- return 0;
- } else if (!packA.collectionName) {
- return 1;
- } else if (!packB.collectionName) {
- return -1;
- } else {
- return packA.collectionName.localeCompare(packB.collectionName);
- }
- });
+ return packages.sort((packA, packB) => {
+ if (!packA.collectionName && !packB.collectionName) {
+ return 0;
+ } else if (!packA.collectionName) {
+ return 1;
+ } else if (!packB.collectionName) {
+ return -1;
+ } else {
+ return packA.collectionName.localeCompare(packB.collectionName);
+ }
+ });
};
diff --git a/reverse_engineering/helpers/connectionHelper.js b/reverse_engineering/helpers/connectionHelper.js
index 7afd190..5dcf797 100644
--- a/reverse_engineering/helpers/connectionHelper.js +++ b/reverse_engineering/helpers/connectionHelper.js @@ -3,125 +3,125 @@ const ssh = require('tunnel-ssh'); const pg = require('pg'); const getSshConfig = info => { - const config = { - username: info.ssh_user, - host: info.ssh_host, - port: info.ssh_port, - dstHost: info.host, - dstPort: info.port, - localHost: '127.0.0.1', - localPort: info.port, - keepAlive: true, - }; - - if (info.ssh_method === 'privateKey') { - return Object.assign({}, config, { - privateKey: fs.readFileSync(info.ssh_key_file), - passphrase: info.ssh_key_passphrase, - }); - } else { - return Object.assign({}, config, { - password: info.ssh_password, - }); - } + const config = { + username: info.ssh_user, + host: info.ssh_host, + port: info.ssh_port, + dstHost: info.host, + dstPort: info.port, + localHost: '127.0.0.1', + localPort: info.port, + keepAlive: true, + }; + + if (info.ssh_method === 'privateKey') { + return Object.assign({}, config, { + privateKey: fs.readFileSync(info.ssh_key_file), + passphrase: info.ssh_key_passphrase, + }); + } else { + return Object.assign({}, config, { + password: info.ssh_password, + }); + } }; const connectViaSsh = info => - new Promise((resolve, reject) => { - ssh(getSshConfig(info), (err, tunnel) => { - if (err) { - reject(err); - } else { - resolve({ - tunnel, - info: Object.assign({}, info, { - host: '127.0.0.1', - }), - }); - } - }); - }); + new Promise((resolve, reject) => { + ssh(getSshConfig(info), (err, tunnel) => { + if (err) { + reject(err); + } else { + resolve({ + tunnel, + info: Object.assign({}, info, { + host: '127.0.0.1', + }), + }); + } + }); + }); const getSslOptions = (connectionInfo, logger) => { - const sslType = mapSslType(connectionInfo.sslType); - - if (!sslType || sslType === 'disable') { - return false; - } - - if (sslType === 'allow') { - return true; - } - - let sslOptions = { - checkServerIdentity(hostname, cert) { - logger.info('Certificate', { - hostname, - cert: { - subject: cert.subject, - issuer: cert.issuer, - valid_from: cert.valid_from, - valid_to: cert.valid_to, - }, - }); - } - }; - - if (fs.existsSync(connectionInfo.certAuthority)) { - sslOptions.ca = fs.readFileSync(connectionInfo.certAuthority).toString(); - } - - if (fs.existsSync(connectionInfo.clientCert)) { - sslOptions.cert = fs.readFileSync(connectionInfo.clientCert).toString(); - } - - if (fs.existsSync(connectionInfo.clientPrivateKey)) { - sslOptions.key = fs.readFileSync(connectionInfo.clientPrivateKey).toString(); - } - - return sslOptions; + const sslType = mapSslType(connectionInfo.sslType); + + if (!sslType || sslType === 'disable') { + return false; + } + + if (sslType === 'allow') { + return true; + } + + let sslOptions = { + checkServerIdentity(hostname, cert) { + logger.info('Certificate', { + hostname, + cert: { + subject: cert.subject, + issuer: cert.issuer, + valid_from: cert.valid_from, + valid_to: cert.valid_to, + }, + }); + }, + }; + + if (fs.existsSync(connectionInfo.certAuthority)) { + sslOptions.ca = fs.readFileSync(connectionInfo.certAuthority).toString(); + } + + if (fs.existsSync(connectionInfo.clientCert)) { + sslOptions.cert = fs.readFileSync(connectionInfo.clientCert).toString(); + } + + if (fs.existsSync(connectionInfo.clientPrivateKey)) { + sslOptions.key = fs.readFileSync(connectionInfo.clientPrivateKey).toString(); + } + + return sslOptions; }; const mapSslType = sslType => { - const oldToNewSslType = { - Off: 'disable', - TRUST_ALL_CERTIFICATES: 'allow', - TRUST_CUSTOM_CA_SIGNED_CERTIFICATES: 
'prefer', - TRUST_SERVER_CLIENT_CERTIFICATES: 'verify-full', - }; - - return oldToNewSslType[sslType] || sslType; + const oldToNewSslType = { + Off: 'disable', + TRUST_ALL_CERTIFICATES: 'allow', + TRUST_CUSTOM_CA_SIGNED_CERTIFICATES: 'prefer', + TRUST_SERVER_CLIENT_CERTIFICATES: 'verify-full', + }; + + return oldToNewSslType[sslType] || sslType; }; const createClient = async (connectionInfo, logger) => { - let sshTunnel = null; - - if (connectionInfo.ssh) { - const { info, tunnel } = await connectViaSsh(connectionInfo); - sshTunnel = tunnel; - connectionInfo = info; - } - - const config = { - host: connectionInfo.host, - user: connectionInfo.userName, - password: connectionInfo.userPassword, - port: connectionInfo.port, - keepAlive: true, - ssl: getSslOptions(connectionInfo, logger), - connectionTimeoutMillis: Number(connectionInfo.queryRequestTimeout) || 60000, - query_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, - statement_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, - database: connectionInfo.database || connectionInfo.maintenanceDatabase, - application_name: 'Hackolade', - }; - - const client = new pg.Client(config); - await client.connect(); - - return { client, sshTunnel }; + let sshTunnel = null; + + if (connectionInfo.ssh) { + const { info, tunnel } = await connectViaSsh(connectionInfo); + sshTunnel = tunnel; + connectionInfo = info; + } + + const config = { + host: connectionInfo.host, + user: connectionInfo.userName, + password: connectionInfo.userPassword, + port: connectionInfo.port, + keepAlive: true, + ssl: getSslOptions(connectionInfo, logger), + connectionTimeoutMillis: Number(connectionInfo.queryRequestTimeout) || 60000, + query_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, + statement_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, + database: connectionInfo.database || connectionInfo.maintenanceDatabase, + application_name: 'Hackolade', + }; + + const client = new pg.Client(config); + await client.connect(); + + return { client, sshTunnel }; }; module.exports = { - createClient, + createClient, }; diff --git a/reverse_engineering/helpers/db.js b/reverse_engineering/helpers/db.js index 1cfbf35..be4ebd7 100644 --- a/reverse_engineering/helpers/db.js +++ b/reverse_engineering/helpers/db.js @@ -4,56 +4,56 @@ let client = null; let logger = null; module.exports = { - initializeClient(newClient, newLogger) { - client = newClient; - logger = newLogger; + initializeClient(newClient, newLogger) { + client = newClient; + logger = newLogger; - client.on('error', error => newLogger.error(error)); - }, + client.on('error', error => newLogger.error(error)); + }, - isClientInitialized() { - return Boolean(client); - }, + isClientInitialized() { + return Boolean(client); + }, - releaseClient() { - if (client) { - return new Promise(resolve => { - client.end(() => { - client = null; - resolve(); - }); - }); - } + releaseClient() { + if (client) { + return new Promise(resolve => { + client.end(() => { + client = null; + resolve(); + }); + }); + } - return Promise.resolve(); - }, + return Promise.resolve(); + }, - async query(query, params, firstRow = false) { - const queryName = queryConstants.getQueryName(query); + async query(query, params, firstRow = false) { + const queryName = queryConstants.getQueryName(query); - logger.info('Execute query', { queryName, params }); + logger.info('Execute query', { queryName, params }); - const start = Date.now(); - const result = await client.query(query, params); - const duration = 
Date.now() - start;
+ const start = Date.now();
+ const result = await client.query(query, params);
+ const duration = Date.now() - start;
- logger.info('Query executed', { queryName, params, duration, rowsCount: result.rowCount });
+ logger.info('Query executed', { queryName, params, duration, rowsCount: result.rowCount });
- const rows = result.rows || [];
+ const rows = result.rows || [];
- return firstRow ? rows[0] : rows;
- },
+ return firstRow ? rows[0] : rows;
+ },
- async queryTolerant(query, params, firstRow = false) {
- try {
- return await this.query(query, params, firstRow);
- } catch (error) {
- error.query = query;
- error.params = params;
+ async queryTolerant(query, params, firstRow = false) {
+ try {
+ return await this.query(query, params, firstRow);
+ } catch (error) {
+ error.query = query;
+ error.params = params;
- logger.error(error);
+ logger.error(error);
- return null;
- }
- },
+ return null;
+ }
+ },
};
diff --git a/reverse_engineering/helpers/getJsonSchema.js b/reverse_engineering/helpers/getJsonSchema.js
index c846b37..1d3fbc3 100644
--- a/reverse_engineering/helpers/getJsonSchema.js
+++ b/reverse_engineering/helpers/getJsonSchema.js
@@ -1,28 +1,28 @@
const getJsonSchema = columns => {
- const properties = columns.reduce((properties, column) => {
- if (column.properties) {
- return {
- ...properties,
- [column.name]: {
- ...column,
- ...getJsonSchema(column.properties),
- },
- };
- }
+ const properties = columns.reduce((properties, column) => {
+ if (column.properties) {
+ return {
+ ...properties,
+ [column.name]: {
+ ...column,
+ ...getJsonSchema(column.properties),
+ },
+ };
+ }
- return {
- ...properties,
- [column.name]: column,
- };
- }, {});
+ return {
+ ...properties,
+ [column.name]: column,
+ };
+ }, {});
- const required = Object.entries(properties)
- .filter(([fieldName, field]) => field.required)
- .map(([fieldName]) => fieldName);
+ const required = Object.entries(properties)
+ .filter(([fieldName, field]) => field.required)
+ .map(([fieldName]) => fieldName);
- return { properties, required };
+ return { properties, required };
};
module.exports = {
- getJsonSchema,
+ getJsonSchema,
};
diff --git a/reverse_engineering/helpers/loggerHelper.js b/reverse_engineering/helpers/loggerHelper.js
index 09f9327..658a8d3 100644
--- a/reverse_engineering/helpers/loggerHelper.js
+++ b/reverse_engineering/helpers/loggerHelper.js
@@ -3,70 +3,70 @@ const net = require('net');
const packageFile = require('../../package.json');
const createLogger = ({ title, logger, hiddenKeys }) => {
- return {
- info(message, additionalData = {}) {
- logger.log('info', { message, ...additionalData }, title, hiddenKeys);
- },
+ return {
+ info(message, additionalData = {}) {
+ logger.log('info', { message, ...additionalData }, title, hiddenKeys);
+ },
- progress(message, dbName = '', tableName = '') {
- logger.progress({ message, containerName: dbName, entityName: tableName });
- },
+ progress(message, dbName = '', tableName = '') {
+ logger.progress({ message, containerName: dbName, entityName: tableName });
+ },
- error(error) {
- logger.log('error', prepareError(error), title);
- },
- };
+ error(error) {
+ logger.log('error', prepareError(error), title);
+ },
+ };
};
const prepareError = error => {
- error = JSON.stringify(error, Object.getOwnPropertyNames(error));
- error = JSON.parse(error);
- return error;
+ error = JSON.stringify(error, Object.getOwnPropertyNames(error));
+ error = JSON.parse(error);
+ return error;
};
const getPluginVersion = () => packageFile.version;
const getSystemInfo = appVersion => { - return ( - '' + - `Date: ${new Date()}` + - '\n' + - `Application version: ${appVersion}` + - '\n' + - `Plugin version: ${getPluginVersion()}` + - '\n\n' + - `System information:` + - '\n' + - ` Hostname: ${os.hostname()}` + - '\n' + - ` Platform: ${os.platform()} ${os.arch()}` + - '\n' + - ` Release: ${os.release()}` + - '\n' + - ` Uptime: ${toTime(os.uptime())}` + - '\n' + - ` Total RAM: ${(os.totalmem() / 1073741824).toFixed(2)} GB` + - '\n' + - ` CPU Model: ${os.cpus()[0].model}` + - '\n' + - ` CPU Clock: ${maxClock(os.cpus())} MHZ` + - '\n' + - ` CPU Cores: ${os.cpus().length} cores` + - '\n\n' - ); + return ( + '' + + `Date: ${new Date()}` + + '\n' + + `Application version: ${appVersion}` + + '\n' + + `Plugin version: ${getPluginVersion()}` + + '\n\n' + + `System information:` + + '\n' + + ` Hostname: ${os.hostname()}` + + '\n' + + ` Platform: ${os.platform()} ${os.arch()}` + + '\n' + + ` Release: ${os.release()}` + + '\n' + + ` Uptime: ${toTime(os.uptime())}` + + '\n' + + ` Total RAM: ${(os.totalmem() / 1073741824).toFixed(2)} GB` + + '\n' + + ` CPU Model: ${os.cpus()[0].model}` + + '\n' + + ` CPU Clock: ${maxClock(os.cpus())} MHZ` + + '\n' + + ` CPU Cores: ${os.cpus().length} cores` + + '\n\n' + ); }; const maxClock = cpus => { - return cpus.reduce((highestClock, cpu) => Math.max(highestClock, cpu.speed), 0); + return cpus.reduce((highestClock, cpu) => Math.max(highestClock, cpu.speed), 0); }; const prefixZero = number => (number < 10 ? '0' + number : number); const toTime = number => { - return Math.floor(number / 3600) + ':' + prefixZero(parseInt((number / 3600 - Math.floor(number / 3600)) * 60)); + return Math.floor(number / 3600) + ':' + prefixZero(parseInt((number / 3600 - Math.floor(number / 3600)) * 60)); }; module.exports = { - createLogger, - getSystemInfo, + createLogger, + getSystemInfo, }; diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index 3a7f39e..415b551 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -1,257 +1,257 @@ let _ = null; const setDependencies = app => { - _ = app.require('lodash'); + _ = app.require('lodash'); }; const columnPropertiesMapper = { - column_default: 'default', - is_nullable: { - keyword: 'required', - values: { - YES: false, - NO: true, - }, - }, - not_null: 'required', - data_type: 'type', - numeric_precision: 'precision', - numeric_scale: 'scale', - datetime_precision: 'timePrecision', - attribute_mode: { - keyword: 'timePrecision', - check: (column, value) => value !== -1 && canHaveTimePrecision(column.data_type), - }, - interval_type: 'intervalOptions', - collation_name: 'collationRule', - column_name: 'name', - number_of_array_dimensions: 'numberOfArrayDimensions', - udt_name: 'udt_name', - character_maximum_length: 'length', - description: 'description', - domain_name: 'domain_name', + column_default: 'default', + is_nullable: { + keyword: 'required', + values: { + YES: false, + NO: true, + }, + }, + not_null: 'required', + data_type: 'type', + numeric_precision: 'precision', + numeric_scale: 'scale', + datetime_precision: 'timePrecision', + attribute_mode: { + keyword: 'timePrecision', + check: (column, value) => value !== -1 && canHaveTimePrecision(column.data_type), + }, + interval_type: 'intervalOptions', + collation_name: 'collationRule', + column_name: 'name', + number_of_array_dimensions: 
'numberOfArrayDimensions', + udt_name: 'udt_name', + character_maximum_length: 'length', + description: 'description', + domain_name: 'domain_name', }; const getColumnValue = (column, key, value) => { - if (columnPropertiesMapper[key]?.check) { - return columnPropertiesMapper[key].check(column, value) ? value : ''; - } + if (columnPropertiesMapper[key]?.check) { + return columnPropertiesMapper[key].check(column, value) ? value : ''; + } - return _.get(columnPropertiesMapper, `${key}.values.${value}`, value); + return _.get(columnPropertiesMapper, `${key}.values.${value}`, value); }; const mapColumnData = userDefinedTypes => column => { - return _.chain(column) - .toPairs() - .map(([key, value]) => [ - columnPropertiesMapper[key]?.keyword || columnPropertiesMapper[key], - getColumnValue(column, key, value), - ]) - .filter(([key, value]) => key && !_.isNil(value)) - .fromPairs() - .thru(setColumnType(userDefinedTypes)) - .value(); + return _.chain(column) + .toPairs() + .map(([key, value]) => [ + columnPropertiesMapper[key]?.keyword || columnPropertiesMapper[key], + getColumnValue(column, key, value), + ]) + .filter(([key, value]) => key && !_.isNil(value)) + .fromPairs() + .thru(setColumnType(userDefinedTypes)) + .value(); }; const setColumnType = userDefinedTypes => column => ({ - ...column, - ...getType(userDefinedTypes, column), + ...column, + ...getType(userDefinedTypes, column), }); const getType = (userDefinedTypes, column) => { - if (column.type === 'ARRAY') { - return getArrayType(userDefinedTypes, column); - } + if (column.type === 'ARRAY') { + return getArrayType(userDefinedTypes, column); + } - if (column.type === 'USER-DEFINED') { - return mapType(userDefinedTypes, column.udt_name); - } + if (column.type === 'USER-DEFINED') { + return mapType(userDefinedTypes, column.udt_name); + } - if (column.domain_name) { - return mapType(userDefinedTypes, column.domain_name); - } + if (column.domain_name) { + return mapType(userDefinedTypes, column.domain_name); + } - return mapType(userDefinedTypes, column.type); + return mapType(userDefinedTypes, column.type); }; const getArrayType = (userDefinedTypes, column) => { - const typeData = mapType(userDefinedTypes, column.udt_name.slice(1)); + const typeData = mapType(userDefinedTypes, column.udt_name.slice(1)); - return { - ...typeData, - array_type: _.fill(Array(column.numberOfArrayDimensions), ''), - }; + return { + ...typeData, + array_type: _.fill(Array(column.numberOfArrayDimensions), ''), + }; }; const mapType = (userDefinedTypes, type) => { - switch (type) { - case 'bigint': - case 'bigserial': - case 'smallint': - case 'integer': - case 'numeric': - case 'real': - case 'double precision': - case 'smallserial': - case 'serial': - case 'money': - return { type: 'numeric', mode: type }; - case 'int8': - return { type: 'numeric', mode: 'bigint' }; - case 'int2': - return { type: 'numeric', mode: 'smallint' }; - case 'int4': - return { type: 'numeric', mode: 'integer' }; - case 'float4': - return { type: 'numeric', mode: 'real' }; - case 'float8': - return { type: 'numeric', mode: 'double precision' }; - case 'bit': - case 'char': - case 'text': - case 'tsvector': - case 'tsquery': - return { type: 'char', mode: type }; - case 'bit varying': - return { type: 'char', mode: 'varbit' }; - case 'character': - return { type: 'char', mode: 'char' }; - case 'character varying': - return { type: 'char', mode: 'varchar' }; - case 'bpchar': - return { type: 'char', mode: 'char' }; - case 'point': - case 'line': - case 'lseg': - case 'box': - case 
'path': - case 'polygon': - case 'circle': - case 'box2d': - case 'box3d': - case 'geometry': - case 'geometry_dump': - case 'geography': - return { type: 'geometry', mode: type }; - case 'bytea': - return { type: 'binary', mode: type }; - case 'inet': - case 'cidr': - case 'macaddr': - case 'macaddr8': - return { type: 'inet', mode: type }; - case 'date': - case 'time': - case 'timestamp': - case 'interval': - return { type: 'datetime', mode: type }; - case 'timestamptz': - case 'timestamp with time zone': - return { type: 'datetime', mode: 'timestamp', timezone: 'WITH TIME ZONE' }; - case 'timestamp without time zone': - return { type: 'datetime', mode: 'timestamp', timezone: 'WITHOUT TIME ZONE' }; - case 'timetz': - case 'time with time zone': - return { type: 'datetime', mode: 'time', timezone: 'WITH TIME ZONE' }; - case 'time without time zone': - return { type: 'datetime', mode: 'time', timezone: 'WITHOUT TIME ZONE' }; - case 'json': - case 'jsonb': - return { type: 'json', mode: type, subtype: 'object' }; - case 'int4range': - case 'int8range': - case 'numrange': - case 'daterange': - case 'tsrange': - case 'tstzrange': - return { type: 'range', mode: type }; - case 'int4multirange': - case 'int8multirange': - case 'nummultirange': - case 'tsmultirange': - case 'tstzmultirange': - case 'datemultirange': - return { type: 'multirange', mode: type }; - case 'uuid': - case 'xml': - case 'boolean': - return { type }; - case 'bool': - return { type: 'boolean' }; - case 'oid': - case 'regclass': - case 'regcollation': - case 'regconfig': - case 'regdictionary': - case 'regnamespace': - case 'regoper': - case 'regoperator': - case 'regproc': - case 'regprocedure': - case 'regrole': - case 'regtype': - return { type: 'oid', mode: type }; + switch (type) { + case 'bigint': + case 'bigserial': + case 'smallint': + case 'integer': + case 'numeric': + case 'real': + case 'double precision': + case 'smallserial': + case 'serial': + case 'money': + return { type: 'numeric', mode: type }; + case 'int8': + return { type: 'numeric', mode: 'bigint' }; + case 'int2': + return { type: 'numeric', mode: 'smallint' }; + case 'int4': + return { type: 'numeric', mode: 'integer' }; + case 'float4': + return { type: 'numeric', mode: 'real' }; + case 'float8': + return { type: 'numeric', mode: 'double precision' }; + case 'bit': + case 'char': + case 'text': + case 'tsvector': + case 'tsquery': + return { type: 'char', mode: type }; + case 'bit varying': + return { type: 'char', mode: 'varbit' }; + case 'character': + return { type: 'char', mode: 'char' }; + case 'character varying': + return { type: 'char', mode: 'varchar' }; + case 'bpchar': + return { type: 'char', mode: 'char' }; + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + case 'box2d': + case 'box3d': + case 'geometry': + case 'geometry_dump': + case 'geography': + return { type: 'geometry', mode: type }; + case 'bytea': + return { type: 'binary', mode: type }; + case 'inet': + case 'cidr': + case 'macaddr': + case 'macaddr8': + return { type: 'inet', mode: type }; + case 'date': + case 'time': + case 'timestamp': + case 'interval': + return { type: 'datetime', mode: type }; + case 'timestamptz': + case 'timestamp with time zone': + return { type: 'datetime', mode: 'timestamp', timezone: 'WITH TIME ZONE' }; + case 'timestamp without time zone': + return { type: 'datetime', mode: 'timestamp', timezone: 'WITHOUT TIME ZONE' }; + case 'timetz': + case 'time with time zone': + return { type: 
'datetime', mode: 'time', timezone: 'WITH TIME ZONE' }; + case 'time without time zone': + return { type: 'datetime', mode: 'time', timezone: 'WITHOUT TIME ZONE' }; + case 'json': + case 'jsonb': + return { type: 'json', mode: type, subtype: 'object' }; + case 'int4range': + case 'int8range': + case 'numrange': + case 'daterange': + case 'tsrange': + case 'tstzrange': + return { type: 'range', mode: type }; + case 'int4multirange': + case 'int8multirange': + case 'nummultirange': + case 'tsmultirange': + case 'tstzmultirange': + case 'datemultirange': + return { type: 'multirange', mode: type }; + case 'uuid': + case 'xml': + case 'boolean': + return { type }; + case 'bool': + return { type: 'boolean' }; + case 'oid': + case 'regclass': + case 'regcollation': + case 'regconfig': + case 'regdictionary': + case 'regnamespace': + case 'regoper': + case 'regoperator': + case 'regproc': + case 'regprocedure': + case 'regrole': + case 'regtype': + return { type: 'oid', mode: type }; - default: { - if (_.some(userDefinedTypes, { name: type })) { - return { $ref: `#/definitions/${type}` }; - } + default: { + if (_.some(userDefinedTypes, { name: type })) { + return { $ref: `#/definitions/${type}` }; + } - return { type: 'char', mode: 'varchar' }; - } - } + return { type: 'char', mode: 'varchar' }; + } + } }; const setSubtypeFromSampledJsonValues = (columns, documents) => { - const sampleDocument = _.first(documents) || {}; + const sampleDocument = _.first(documents) || {}; - return columns.map(column => { - if (column.type !== 'json') { - return column; - } + return columns.map(column => { + if (column.type !== 'json') { + return column; + } - const sampleValue = sampleDocument[column.name]; - const parsedValue = safeParse(sampleValue); - const jsonType = getParsedJsonValueType(parsedValue); + const sampleValue = sampleDocument[column.name]; + const parsedValue = safeParse(sampleValue); + const jsonType = getParsedJsonValueType(parsedValue); - return { - ...column, - subtype: jsonType, - }; - }); + return { + ...column, + subtype: jsonType, + }; + }); }; const safeParse = json => { - try { - return JSON.parse(json); - } catch (error) { - return {}; - } + try { + return JSON.parse(json); + } catch (error) { + return {}; + } }; const getParsedJsonValueType = value => { - if (Array.isArray(value)) { - return 'array'; - } + if (Array.isArray(value)) { + return 'array'; + } - const type = typeof value; + const type = typeof value; - if (type === 'undefined') { - return 'object'; - } + if (type === 'undefined') { + return 'object'; + } - return type; + return type; }; const canHaveTimePrecision = columnDataType => { - return _.includes( - ['timestamp with time zone', 'timestamp without time zone', 'time with time zone', 'time without time zone'], - columnDataType - ); + return _.includes( + ['timestamp with time zone', 'timestamp without time zone', 'time with time zone', 'time without time zone'], + columnDataType, + ); }; module.exports = { - setDependencies, - mapColumnData, - setSubtypeFromSampledJsonValues, + setDependencies, + mapColumnData, + setSubtypeFromSampledJsonValues, }; diff --git a/reverse_engineering/helpers/postgresHelpers/common.js b/reverse_engineering/helpers/postgresHelpers/common.js index 5d79ca5..c55400a 100644 --- a/reverse_engineering/helpers/postgresHelpers/common.js +++ b/reverse_engineering/helpers/postgresHelpers/common.js @@ -1,21 +1,20 @@ let _ = null; const setDependencies = app => { - _ = app.require('lodash'); + _ = app.require('lodash'); }; const 
clearEmptyPropertiesInObject = obj => - _.chain(obj) - .toPairs() - .reject(([key, value]) => _.isNil(value)) - .fromPairs() - .value(); + _.chain(obj) + .toPairs() + .reject(([key, value]) => _.isNil(value)) + .fromPairs() + .value(); -const getColumnNameByPosition = columns => position => - _.find(columns, { ordinal_position: position })?.column_name; +const getColumnNameByPosition = columns => position => _.find(columns, { ordinal_position: position })?.column_name; module.exports = { - clearEmptyPropertiesInObject, - setDependencies, - getColumnNameByPosition + clearEmptyPropertiesInObject, + setDependencies, + getColumnNameByPosition, }; diff --git a/reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js b/reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js index 20e7282..c02c075 100644 --- a/reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js @@ -3,25 +3,25 @@ const { getColumnNameByPosition } = require('./common'); let _ = null; const setDependencies = app => { - _ = app.require('lodash'); + _ = app.require('lodash'); }; const prepareForeignKeys = (tableForeignKeys, tableName, schemaName, columns) => { - return _.map(tableForeignKeys, foreignKeyData => { - return { - relationshipName: foreignKeyData.relationship_name, - dbName: foreignKeyData.foreign_table_schema, - parentCollection: foreignKeyData.foreign_table_name, - parentField: foreignKeyData.foreign_columns, - childDbName: schemaName, - childCollection: tableName, - childField: _.map(foreignKeyData.table_columns_positions, getColumnNameByPosition(columns)), - relationshipType: 'Foreign Key', - }; - }); + return _.map(tableForeignKeys, foreignKeyData => { + return { + relationshipName: foreignKeyData.relationship_name, + dbName: foreignKeyData.foreign_table_schema, + parentCollection: foreignKeyData.foreign_table_name, + parentField: foreignKeyData.foreign_columns, + childDbName: schemaName, + childCollection: tableName, + childField: _.map(foreignKeyData.table_columns_positions, getColumnNameByPosition(columns)), + relationshipType: 'Foreign Key', + }; + }); }; module.exports = { - setDependencies, - prepareForeignKeys, + setDependencies, + prepareForeignKeys, }; diff --git a/reverse_engineering/helpers/postgresHelpers/functionHelper.js b/reverse_engineering/helpers/postgresHelpers/functionHelper.js index ed4adf2..aa075d5 100644 --- a/reverse_engineering/helpers/postgresHelpers/functionHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/functionHelper.js @@ -1,95 +1,95 @@ let _ = null; const setDependencies = app => { - _ = app.require('lodash'); + _ = app.require('lodash'); }; const mapFunctionArgs = args => { - return _.map(args, arg => ({ - argumentMode: arg.parameter_mode, - argumentName: arg.parameter_name, - argumentType: getArgType(arg.data_type, arg.udt_name), - defaultExpression: arg.parameter_default, - })); + return _.map(args, arg => ({ + argumentMode: arg.parameter_mode, + argumentName: arg.parameter_name, + argumentType: getArgType(arg.data_type, arg.udt_name), + defaultExpression: arg.parameter_default, + })); }; const getArgType = (argType, argUdt) => { - if (argType === 'USER-DEFINED') { - return argUdt; - } + if (argType === 'USER-DEFINED') { + return argUdt; + } - if(argType === 'ARRAY') { - return argUdt.slice(1) + '[]' - } + if (argType === 'ARRAY') { + return argUdt.slice(1) + '[]'; + } - return argType; + return argType; }; const getVolatility = volatility => { - switch (volatility) { - case 
'i':
- return 'IMMUTABLE';
- case 's':
- return 'STABLE';
- case 'v':
- default:
- return 'VOLATILE';
- }
+ switch (volatility) {
+ case 'i':
+ return 'IMMUTABLE';
+ case 's':
+ return 'STABLE';
+ case 'v':
+ default:
+ return 'VOLATILE';
+ }
};
const getParallel = parallel => {
- switch (parallel) {
- case 's':
- return 'SAFE';
- case 'r':
- return 'RESTRICTED';
- case 'u':
- return 'UNSAFE';
- default:
- return '';
- }
+ switch (parallel) {
+ case 's':
+ return 'SAFE';
+ case 'r':
+ return 'RESTRICTED';
+ case 'u':
+ return 'UNSAFE';
+ default:
+ return '';
+ }
};
const getNullArgs = strict => {
- if (strict) {
- return 'STRICT';
- }
+ if (strict) {
+ return 'STRICT';
+ }
- return 'CALLED ON NULL INPUT';
+ return 'CALLED ON NULL INPUT';
};
const mapFunctionData = (functionData, functionArgs, additionalData) => {
- return {
- name: functionData.name,
- functionDescription: additionalData?.description,
- functionArguments: mapFunctionArgs(functionArgs),
- functionReturnsSetOf: additionalData?.returns_set,
- functionReturnType: functionData.return_data_type,
- functionLanguage: _.toLower(functionData.external_language),
- functionBody: functionData.routine_definition,
- functionWindow: additionalData.kind === 'w',
- functionVolatility: getVolatility(additionalData?.volatility),
- functionLeakProof: additionalData?.leak_proof,
- functionNullArgs: getNullArgs(additionalData?.strict),
- functionSqlSecurity: functionData.security_type,
- functionParallel: getParallel(functionData.parallel),
- functionExecutionCost: functionData.estimated_cost,
- functionExecutionRows: functionData.estimated_rows,
- };
+ return {
+ name: functionData.name,
+ functionDescription: additionalData?.description,
+ functionArguments: mapFunctionArgs(functionArgs),
+ functionReturnsSetOf: additionalData?.returns_set,
+ functionReturnType: functionData.return_data_type,
+ functionLanguage: _.toLower(functionData.external_language),
+ functionBody: functionData.routine_definition,
+ functionWindow: additionalData?.kind === 'w',
+ functionVolatility: getVolatility(additionalData?.volatility),
+ functionLeakProof: additionalData?.leak_proof,
+ functionNullArgs: getNullArgs(additionalData?.strict),
+ functionSqlSecurity: functionData.security_type,
+ functionParallel: getParallel(functionData.parallel),
+ functionExecutionCost: functionData.estimated_cost,
+ functionExecutionRows: functionData.estimated_rows,
+ };
};
const mapProcedureData = (functionData, functionArgs, additionalData) => {
- return {
- name: functionData.name,
- description: additionalData?.description,
- language: _.toLower(functionData.external_language),
- inputArgs: mapFunctionArgs(functionArgs),
- body: functionData.routine_definition,
- };
+ return {
+ name: functionData.name,
+ description: additionalData?.description,
+ language: _.toLower(functionData.external_language),
+ inputArgs: mapFunctionArgs(functionArgs),
+ body: functionData.routine_definition,
+ };
};
module.exports = {
- setDependencies,
- mapFunctionData,
- mapProcedureData,
+ setDependencies,
+ mapFunctionData,
+ mapProcedureData,
};
diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js
index 54107c8..7ef892d 100644
--- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js
+++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js
@@ -3,349 +3,349 @@ const { clearEmptyPropertiesInObject, getColumnNameByPosition } = require('./com
let _ = null;
const setDependencies = app => {
- _ =
app.require('lodash'); + _ = app.require('lodash'); }; const prepareStorageParameters = (reloptions, tableToastOptions) => { - if (!reloptions && !tableToastOptions) { - return null; - } - - const options = prepareOptions(reloptions); - const toastOptions = prepareOptions(tableToastOptions?.toast_options); - - const fillfactor = options.fillfactor; - const parallel_workers = options.parallel_workers; - const autovacuum_enabled = options.autovacuum_enabled; - const autovacuum = clearEmptyPropertiesInObject({ - vacuum_index_cleanup: options.vacuum_index_cleanup, - vacuum_truncate: options.vacuum_truncate, - autovacuum_vacuum_threshold: options.autovacuum_vacuum_threshold, - autovacuum_vacuum_scale_factor: options.autovacuum_vacuum_scale_factor, - autovacuum_vacuum_insert_threshold: options.autovacuum_vacuum_insert_threshold, - autovacuum_vacuum_insert_scale_factor: options.autovacuum_vacuum_insert_scale_factor, - autovacuum_analyze_threshold: options.autovacuum_analyze_threshold, - autovacuum_analyze_scale_factor: options.autovacuum_analyze_scale_factor, - autovacuum_vacuum_cost_delay: options.autovacuum_vacuum_cost_delay, - autovacuum_vacuum_cost_limit: options.autovacuum_vacuum_cost_limit, - autovacuum_freeze_min_age: options.autovacuum_freeze_min_age, - autovacuum_freeze_max_age: options.autovacuum_freeze_max_age, - autovacuum_freeze_table_age: options.autovacuum_freeze_table_age, - autovacuum_multixact_freeze_min_age: options.autovacuum_multixact_freeze_min_age, - autovacuum_multixact_freeze_max_age: options.autovacuum_multixact_freeze_max_age, - autovacuum_multixact_freeze_table_age: options.autovacuum_multixact_freeze_table_age, - log_autovacuum_min_duration: options.log_autovacuum_min_duration, - }); - const user_catalog_table = options.user_catalog_table; - const toast_autovacuum_enabled = toastOptions.autovacuum_enabled; - const toast = clearEmptyPropertiesInObject({ - toast_tuple_target: options.toast_tuple_target, - toast_vacuum_index_cleanup: toastOptions.vacuum_index_cleanup, - toast_vacuum_truncate: toastOptions.vacuum_truncate, - toast_autovacuum_vacuum_threshold: toastOptions.autovacuum_vacuum_threshold, - toast_autovacuum_vacuum_scale_factor: toastOptions.autovacuum_vacuum_scale_factor, - toast_autovacuum_vacuum_insert_threshold: toastOptions.autovacuum_vacuum_insert_threshold, - toast_autovacuum_vacuum_insert_scale_factor: toastOptions.autovacuum_vacuum_insert_scale_factor, - toast_autovacuum_vacuum_cost_delay: toastOptions.autovacuum_vacuum_cost_delay, - toast_autovacuum_vacuum_cost_limit: toastOptions.autovacuum_vacuum_cost_limit, - toast_autovacuum_freeze_min_age: toastOptions.autovacuum_freeze_min_age, - toast_autovacuum_freeze_max_age: toastOptions.autovacuum_freeze_max_age, - toast_autovacuum_freeze_table_age: toastOptions.autovacuum_freeze_table_age, - toast_autovacuum_multixact_freeze_min_age: toastOptions.autovacuum_multixact_freeze_min_age, - toast_autovacuum_multixact_freeze_max_age: toastOptions.autovacuum_multixact_freeze_max_age, - toast_autovacuum_multixact_freeze_table_age: toastOptions.autovacuum_multixact_freeze_table_age, - toast_log_autovacuum_min_duration: toastOptions.log_autovacuum_min_duration, - }); - - const storage_parameter = { - fillfactor, - parallel_workers, - autovacuum_enabled, - autovacuum: _.isEmpty(autovacuum) ? null : autovacuum, - toast_autovacuum_enabled, - toast: _.isEmpty(toast) ? 
null : toast, - user_catalog_table, - }; - - return clearEmptyPropertiesInObject(storage_parameter); + if (!reloptions && !tableToastOptions) { + return null; + } + + const options = prepareOptions(reloptions); + const toastOptions = prepareOptions(tableToastOptions?.toast_options); + + const fillfactor = options.fillfactor; + const parallel_workers = options.parallel_workers; + const autovacuum_enabled = options.autovacuum_enabled; + const autovacuum = clearEmptyPropertiesInObject({ + vacuum_index_cleanup: options.vacuum_index_cleanup, + vacuum_truncate: options.vacuum_truncate, + autovacuum_vacuum_threshold: options.autovacuum_vacuum_threshold, + autovacuum_vacuum_scale_factor: options.autovacuum_vacuum_scale_factor, + autovacuum_vacuum_insert_threshold: options.autovacuum_vacuum_insert_threshold, + autovacuum_vacuum_insert_scale_factor: options.autovacuum_vacuum_insert_scale_factor, + autovacuum_analyze_threshold: options.autovacuum_analyze_threshold, + autovacuum_analyze_scale_factor: options.autovacuum_analyze_scale_factor, + autovacuum_vacuum_cost_delay: options.autovacuum_vacuum_cost_delay, + autovacuum_vacuum_cost_limit: options.autovacuum_vacuum_cost_limit, + autovacuum_freeze_min_age: options.autovacuum_freeze_min_age, + autovacuum_freeze_max_age: options.autovacuum_freeze_max_age, + autovacuum_freeze_table_age: options.autovacuum_freeze_table_age, + autovacuum_multixact_freeze_min_age: options.autovacuum_multixact_freeze_min_age, + autovacuum_multixact_freeze_max_age: options.autovacuum_multixact_freeze_max_age, + autovacuum_multixact_freeze_table_age: options.autovacuum_multixact_freeze_table_age, + log_autovacuum_min_duration: options.log_autovacuum_min_duration, + }); + const user_catalog_table = options.user_catalog_table; + const toast_autovacuum_enabled = toastOptions.autovacuum_enabled; + const toast = clearEmptyPropertiesInObject({ + toast_tuple_target: options.toast_tuple_target, + toast_vacuum_index_cleanup: toastOptions.vacuum_index_cleanup, + toast_vacuum_truncate: toastOptions.vacuum_truncate, + toast_autovacuum_vacuum_threshold: toastOptions.autovacuum_vacuum_threshold, + toast_autovacuum_vacuum_scale_factor: toastOptions.autovacuum_vacuum_scale_factor, + toast_autovacuum_vacuum_insert_threshold: toastOptions.autovacuum_vacuum_insert_threshold, + toast_autovacuum_vacuum_insert_scale_factor: toastOptions.autovacuum_vacuum_insert_scale_factor, + toast_autovacuum_vacuum_cost_delay: toastOptions.autovacuum_vacuum_cost_delay, + toast_autovacuum_vacuum_cost_limit: toastOptions.autovacuum_vacuum_cost_limit, + toast_autovacuum_freeze_min_age: toastOptions.autovacuum_freeze_min_age, + toast_autovacuum_freeze_max_age: toastOptions.autovacuum_freeze_max_age, + toast_autovacuum_freeze_table_age: toastOptions.autovacuum_freeze_table_age, + toast_autovacuum_multixact_freeze_min_age: toastOptions.autovacuum_multixact_freeze_min_age, + toast_autovacuum_multixact_freeze_max_age: toastOptions.autovacuum_multixact_freeze_max_age, + toast_autovacuum_multixact_freeze_table_age: toastOptions.autovacuum_multixact_freeze_table_age, + toast_log_autovacuum_min_duration: toastOptions.log_autovacuum_min_duration, + }); + + const storage_parameter = { + fillfactor, + parallel_workers, + autovacuum_enabled, + autovacuum: _.isEmpty(autovacuum) ? null : autovacuum, + toast_autovacuum_enabled, + toast: _.isEmpty(toast) ? 
null : toast, + user_catalog_table, + }; + + return clearEmptyPropertiesInObject(storage_parameter); }; const prepareTablePartition = (partitionResult, tableColumns) => { - if (!partitionResult) { - return null; - } - - const partitionMethod = getPartitionMethod(partitionResult); - const isExpression = _.some(partitionResult.partition_attributes_positions, position => position === 0); - const key = isExpression ? 'partitioning_expression' : 'compositePartitionKey'; - const value = isExpression - ? getPartitionExpression(partitionResult, tableColumns) - : _.map(partitionResult.partition_attributes_positions, getColumnNameByPosition(tableColumns)); - - return [ - { - partitionMethod, - partitionBy: isExpression ? 'expression' : 'keys', - [key]: value, - }, - ]; + if (!partitionResult) { + return null; + } + + const partitionMethod = getPartitionMethod(partitionResult); + const isExpression = _.some(partitionResult.partition_attributes_positions, position => position === 0); + const key = isExpression ? 'partitioning_expression' : 'compositePartitionKey'; + const value = isExpression + ? getPartitionExpression(partitionResult, tableColumns) + : _.map(partitionResult.partition_attributes_positions, getColumnNameByPosition(tableColumns)); + + return [ + { + partitionMethod, + partitionBy: isExpression ? 'expression' : 'keys', + [key]: value, + }, + ]; }; const getPartitionMethod = partitionResult => { - const type = partitionResult.partition_method; - - switch (type) { - case 'h': - return 'HASH'; - case 'l': - return 'LIST'; - case 'r': - return 'RANGE'; - default: - return ''; - } + const type = partitionResult.partition_method; + + switch (type) { + case 'h': + return 'HASH'; + case 'l': + return 'LIST'; + case 'r': + return 'RANGE'; + default: + return ''; + } }; const getPartitionExpression = (partitionResult, tableColumns) => { - let expressionIndex = 0; - const expressions = _.split(partitionResult.expressions, ','); - - return _.chain(partitionResult.partition_attributes_positions) - .map(attributePosition => { - if (attributePosition === 0) { - const expression = expressions[expressionIndex]; - expressionIndex++; - - return expression; - } - - return getColumnNameByPosition(tableColumns)(attributePosition); - }) - .join(',') - .value(); + let expressionIndex = 0; + const expressions = _.split(partitionResult.expressions, ','); + + return _.chain(partitionResult.partition_attributes_positions) + .map(attributePosition => { + if (attributePosition === 0) { + const expression = expressions[expressionIndex]; + expressionIndex++; + + return expression; + } + + return getColumnNameByPosition(tableColumns)(attributePosition); + }) + .join(',') + .value(); }; const splitByEqualitySymbol = item => _.split(item, '='); const checkHaveJsonTypes = columns => { - return _.find(columns, { type: 'json' }); + return _.find(columns, { type: 'json' }); }; const getLimit = (count, recordSamplingSettings) => { - const per = recordSamplingSettings.relative.value; - const size = - recordSamplingSettings.active === 'absolute' - ? recordSamplingSettings.absolute.value - : Math.round((count / 100) * per); - return Math.min(size, 100000); + const per = recordSamplingSettings.relative.value; + const size = + recordSamplingSettings.active === 'absolute' + ? 
recordSamplingSettings.absolute.value + : Math.round((count / 100) * per); + return Math.min(size, 100000); }; const prepareTableConstraints = (constraintsResult, attributesWithPositions) => { - return _.reduce( - constraintsResult, - (entityConstraints, constraint) => { - switch (constraint.constraint_type) { - case 'c': - return { - ...entityConstraints, - chkConstr: [...entityConstraints.chkConstr, getCheckConstraint(constraint)], - }; - case 'p': - return { - ...entityConstraints, - primaryKey: [ - ...entityConstraints.primaryKey, - getPrimaryKeyConstraint(constraint, attributesWithPositions), - ], - }; - case 'u': - return { - ...entityConstraints, - uniqueKey: [ - ...entityConstraints.uniqueKey, - getUniqueKeyConstraint(constraint, attributesWithPositions), - ], - }; - default: - return entityConstraints; - } - }, - { - chkConstr: [], - uniqueKey: [], - primaryKey: [], - } - ); + return _.reduce( + constraintsResult, + (entityConstraints, constraint) => { + switch (constraint.constraint_type) { + case 'c': + return { + ...entityConstraints, + chkConstr: [...entityConstraints.chkConstr, getCheckConstraint(constraint)], + }; + case 'p': + return { + ...entityConstraints, + primaryKey: [ + ...entityConstraints.primaryKey, + getPrimaryKeyConstraint(constraint, attributesWithPositions), + ], + }; + case 'u': + return { + ...entityConstraints, + uniqueKey: [ + ...entityConstraints.uniqueKey, + getUniqueKeyConstraint(constraint, attributesWithPositions), + ], + }; + default: + return entityConstraints; + } + }, + { + chkConstr: [], + uniqueKey: [], + primaryKey: [], + }, + ); }; const getPrimaryKeyConstraint = (constraint, tableColumns) => { - return { - constraintName: constraint.constraint_name, - compositePrimaryKey: _.map(constraint.constraint_keys, getColumnNameByPosition(tableColumns)), - indexStorageParameters: _.join(constraint.storage_parameters, ','), - indexTablespace: constraint.tablespace, - }; + return { + constraintName: constraint.constraint_name, + compositePrimaryKey: _.map(constraint.constraint_keys, getColumnNameByPosition(tableColumns)), + indexStorageParameters: _.join(constraint.storage_parameters, ','), + indexTablespace: constraint.tablespace, + }; }; const getUniqueKeyConstraint = (constraint, tableColumns) => { - return { - constraintName: constraint.constraint_name, - compositeUniqueKey: _.map(constraint.constraint_keys, getColumnNameByPosition(tableColumns)), - indexStorageParameters: _.join(constraint.storage_parameters, ','), - indexTablespace: constraint.tablespace, - indexComment: constraint.description, - }; + return { + constraintName: constraint.constraint_name, + compositeUniqueKey: _.map(constraint.constraint_keys, getColumnNameByPosition(tableColumns)), + indexStorageParameters: _.join(constraint.storage_parameters, ','), + indexTablespace: constraint.tablespace, + indexComment: constraint.description, + }; }; const getCheckConstraint = constraint => { - return { - chkConstrName: constraint.constraint_name, - constrExpression: constraint.expression, - noInherit: constraint.no_inherit, - constrDescription: constraint.description, - }; + return { + chkConstrName: constraint.constraint_name, + constrExpression: constraint.expression, + noInherit: constraint.no_inherit, + constrDescription: constraint.description, + }; }; const prepareTableIndexes = tableIndexesResult => { - return _.map(tableIndexesResult, indexData => { - const allColumns = mapIndexColumns(indexData); - const columns = _.slice(allColumns, 0, indexData.number_of_keys); - const include = 
_.chain(allColumns) - .slice(indexData.number_of_keys) - .map(column => _.pick(column, 'name')) - .value(); - - const index = { - indxName: indexData.indexname, - index_method: indexData.index_method, - unique: indexData.index_unique ?? false, - index_tablespace_name: indexData.tablespace_name || '', - index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters), - where: indexData.where_expression || '', - include, - columns: - indexData.index_method === 'btree' - ? columns - : _.map(columns, column => _.omit(column, 'sortOrder', 'nullsOrder')), - }; - - return clearEmptyPropertiesInObject(index); - }); + return _.map(tableIndexesResult, indexData => { + const allColumns = mapIndexColumns(indexData); + const columns = _.slice(allColumns, 0, indexData.number_of_keys); + const include = _.chain(allColumns) + .slice(indexData.number_of_keys) + .map(column => _.pick(column, 'name')) + .value(); + + const index = { + indxName: indexData.indexname, + index_method: indexData.index_method, + unique: indexData.index_unique ?? false, + index_tablespace_name: indexData.tablespace_name || '', + index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters), + where: indexData.where_expression || '', + include, + columns: + indexData.index_method === 'btree' + ? columns + : _.map(columns, column => _.omit(column, 'sortOrder', 'nullsOrder')), + }; + + return clearEmptyPropertiesInObject(index); + }); }; const mapIndexColumns = indexData => { - return _.chain(indexData.columns) - .map((columnName, itemIndex) => { - if (!columnName) { - return; - } - - const sortOrder = _.get(indexData, `ascendings.${itemIndex}`, false) ? 'ASC' : 'DESC'; - const nullsOrder = getNullsOrder(_.get(indexData, `nulls_first.${itemIndex}`)); - const opclass = _.get(indexData, `opclasses.${itemIndex}`, ''); - const collation = _.get(indexData, `collations.${itemIndex}`, ''); - - return { - name: columnName, - sortOrder, - nullsOrder, - opclass, - collation, - }; - }) - .compact() - .value(); + return _.chain(indexData.columns) + .map((columnName, itemIndex) => { + if (!columnName) { + return; + } + + const sortOrder = _.get(indexData, `ascendings.${itemIndex}`, false) ? 'ASC' : 'DESC'; + const nullsOrder = getNullsOrder(_.get(indexData, `nulls_first.${itemIndex}`)); + const opclass = _.get(indexData, `opclasses.${itemIndex}`, ''); + const collation = _.get(indexData, `collations.${itemIndex}`, ''); + + return { + name: columnName, + sortOrder, + nullsOrder, + opclass, + collation, + }; + }) + .compact() + .value(); }; const getNullsOrder = nulls_first => { - if (_.isNil(nulls_first)) { - return ''; - } + if (_.isNil(nulls_first)) { + return ''; + } - return nulls_first ? 'NULLS FIRST' : 'NULLS LAST'; + return nulls_first ? 
'NULLS FIRST' : 'NULLS LAST'; }; const getIndexStorageParameters = storageParameters => { - if (!storageParameters) { - return null; - } - - const params = _.fromPairs(_.map(storageParameters, param => splitByEqualitySymbol(param))); - - const data = { - index_fillfactor: params.fillfactor, - deduplicate_items: params.deduplicate_items, - index_buffering: params.index_buffering, - fastupdate: params.fastupdate, - gin_pending_list_limit: params.gin_pending_list_limit, - pages_per_range: params.pages_per_range, - autosummarize: params.autosummarize, - }; - - return clearEmptyPropertiesInObject(data); + if (!storageParameters) { + return null; + } + + const params = _.fromPairs(_.map(storageParameters, param => splitByEqualitySymbol(param))); + + const data = { + index_fillfactor: params.fillfactor, + deduplicate_items: params.deduplicate_items, + index_buffering: params.index_buffering, + fastupdate: params.fastupdate, + gin_pending_list_limit: params.gin_pending_list_limit, + pages_per_range: params.pages_per_range, + autosummarize: params.autosummarize, + }; + + return clearEmptyPropertiesInObject(data); }; const prepareTableLevelData = (tableLevelData, tableToastOptions) => { - const temporary = tableLevelData?.relpersistence === 't'; - const unlogged = tableLevelData?.relpersistence === 'u'; - const storage_parameter = prepareStorageParameters(tableLevelData?.reloptions, tableToastOptions); - const table_tablespace_name = tableLevelData?.spcname; - - return { - temporary, - unlogged, - storage_parameter, - table_tablespace_name, - }; + const temporary = tableLevelData?.relpersistence === 't'; + const unlogged = tableLevelData?.relpersistence === 'u'; + const storage_parameter = prepareStorageParameters(tableLevelData?.reloptions, tableToastOptions); + const table_tablespace_name = tableLevelData?.spcname; + + return { + temporary, + unlogged, + storage_parameter, + table_tablespace_name, + }; }; const convertValueToType = value => { - switch (getTypeOfValue(value)) { - case 'number': - case 'boolean': - return JSON.parse(value); - case 'string': - default: - return value; - } + switch (getTypeOfValue(value)) { + case 'number': + case 'boolean': + return JSON.parse(value); + case 'string': + default: + return value; + } }; const getTypeOfValue = value => { - try { - const type = typeof JSON.parse(value); + try { + const type = typeof JSON.parse(value); - if (type === 'object') { - return 'string'; - } + if (type === 'object') { + return 'string'; + } - return type; - } catch (error) { - return 'string'; - } + return type; + } catch (error) { + return 'string'; + } }; const prepareOptions = options => { - return ( - _.chain(options) - .map(splitByEqualitySymbol) - .map(([key, value]) => [key, convertValueToType(value)]) - .fromPairs() - .value() || {} - ); + return ( + _.chain(options) + .map(splitByEqualitySymbol) + .map(([key, value]) => [key, convertValueToType(value)]) + .fromPairs() + .value() || {} + ); }; const prepareTableInheritance = (schemaName, inheritanceResult) => { - return _.map(inheritanceResult, ({ parent_table_name }) => ({ parentTable: [schemaName, parent_table_name] })); + return _.map(inheritanceResult, ({ parent_table_name }) => ({ parentTable: [schemaName, parent_table_name] })); }; module.exports = { - prepareStorageParameters, - prepareTablePartition, - setDependencies, - checkHaveJsonTypes, - prepareTableConstraints, - prepareTableLevelData, - prepareTableIndexes, - getLimit, - prepareTableInheritance, + prepareStorageParameters, + prepareTablePartition, + 
setDependencies, + checkHaveJsonTypes, + prepareTableConstraints, + prepareTableLevelData, + prepareTableIndexes, + getLimit, + prepareTableInheritance, }; diff --git a/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js index b1e13f6..2de4810 100644 --- a/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js @@ -3,134 +3,134 @@ const { mapColumnData } = require('./columnHelper'); let _ = null; const setDependencies = app => { - _ = app.require('lodash'); + _ = app.require('lodash'); }; const getUserDefinedTypes = (udtResponse, domainTypes) => { - return _.chain(udtResponse) - .map(typeData => { - switch (typeData.type) { - case 'e': - return getEnumType(typeData); - case 'r': - return getRangeType(typeData); - case 'c': - return getCompositeType(typeData); - default: - return null; - } - }) - .compact() - .concat(_.map(domainTypes, mapDomainType)) - .value(); + return _.chain(udtResponse) + .map(typeData => { + switch (typeData.type) { + case 'e': + return getEnumType(typeData); + case 'r': + return getRangeType(typeData); + case 'c': + return getCompositeType(typeData); + default: + return null; + } + }) + .compact() + .concat(_.map(domainTypes, mapDomainType)) + .value(); }; const getEnumType = typeData => { - return { - name: typeData.name, - type: 'enum', - enum: typeData.enum_values || [], - }; + return { + name: typeData.name, + type: 'enum', + enum: typeData.enum_values || [], + }; }; const getRangeType = typeData => { - return { - name: typeData.name, - type: 'range_udt', - rangeSubtype: typeData.range_subtype || '', - operatorClass: typeData.range_opclass_name || '', - collation: typeData.range_collation_name || '', - canonicalFunction: typeData.range_canonical_proc || '', - subtypeDiffFunction: typeData.range_diff_proc || '', - }; + return { + name: typeData.name, + type: 'range_udt', + rangeSubtype: typeData.range_subtype || '', + operatorClass: typeData.range_opclass_name || '', + collation: typeData.range_collation_name || '', + canonicalFunction: typeData.range_canonical_proc || '', + subtypeDiffFunction: typeData.range_diff_proc || '', + }; }; const getCompositeType = typeData => { - const columns = _.map(typeData.columns, mapColumnData([])); + const columns = _.map(typeData.columns, mapColumnData([])); - return { - name: typeData.name, - type: 'composite', - properties: columns, - }; + return { + name: typeData.name, + type: 'composite', + properties: columns, + }; }; const isTypeComposite = typeData => typeData.type === 'c'; const mapDomainType = domain => { - return { - name: domain.domain_name, - type: 'domain', - underlyingType: _.flow( - setLength(domain), - setPrecisionAndScale(domain), - setIntervalType(domain), - setIntervalPrecision(domain) - )(getUnderlyingType(domain)), - collation: domain.collation_name || '', - default: domain.domain_default || '', - required: _.first(domain.constraints)?.not_null, - checkConstraints: _.map(domain.constraints, mapDomainConstraint), - }; + return { + name: domain.domain_name, + type: 'domain', + underlyingType: _.flow( + setLength(domain), + setPrecisionAndScale(domain), + setIntervalType(domain), + setIntervalPrecision(domain), + )(getUnderlyingType(domain)), + collation: domain.collation_name || '', + default: domain.domain_default || '', + required: _.first(domain.constraints)?.not_null, + checkConstraints: _.map(domain.constraints, 
mapDomainConstraint), + }; }; const getUnderlyingType = domain => { - if (domain.data_type === 'USER-DEFINED') { - return domain.udt_name; - } + if (domain.data_type === 'USER-DEFINED') { + return domain.udt_name; + } - return domain.data_type; + return domain.data_type; }; const setLength = domain => type => { - if (domain.character_maximum_length) { - return `${type}(${domain.character_maximum_length})`; - } + if (domain.character_maximum_length) { + return `${type}(${domain.character_maximum_length})`; + } - return type; + return type; }; const setPrecisionAndScale = domain => type => { - if (type !== 'numeric') { - return type; - } + if (type !== 'numeric') { + return type; + } - if (_.isNumber(domain.numeric_precision) && _.isNumber(domain.numeric_scale)) { - return `${type}(${domain.numeric_precision},${domain.numeric_scale})`; - } + if (_.isNumber(domain.numeric_precision) && _.isNumber(domain.numeric_scale)) { + return `${type}(${domain.numeric_precision},${domain.numeric_scale})`; + } - if (_.isNumber(domain.numeric_precision)) { - return `${type}(${domain.numeric_precision})`; - } + if (_.isNumber(domain.numeric_precision)) { + return `${type}(${domain.numeric_precision})`; + } - return type; + return type; }; const setIntervalType = domain => type => { - if (domain.interval_type) { - return `${type} ${domain.interval_type}`; - } + if (domain.interval_type) { + return `${type} ${domain.interval_type}`; + } - return type; + return type; }; const setIntervalPrecision = domain => type => { - if (_.isNumber(domain.interval_precision)) { - return `${type}(${domain.interval_precision})`; - } + if (_.isNumber(domain.interval_precision)) { + return `${type}(${domain.interval_precision})`; + } - return type; + return type; }; const mapDomainConstraint = constraint => { - return { - name: constraint.constraint_name, - expression: constraint.expression, - }; + return { + name: constraint.constraint_name, + expression: constraint.expression, + }; }; module.exports = { - setDependencies, - getUserDefinedTypes, - isTypeComposite, + setDependencies, + getUserDefinedTypes, + isTypeComposite, }; diff --git a/reverse_engineering/helpers/postgresHelpers/viewHelper.js b/reverse_engineering/helpers/postgresHelpers/viewHelper.js index a62f4a5..3151748 100644 --- a/reverse_engineering/helpers/postgresHelpers/viewHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/viewHelper.js @@ -3,7 +3,7 @@ const { clearEmptyPropertiesInObject } = require('./common'); let _ = null; const setDependencies = app => { - _ = app.require('lodash'); + _ = app.require('lodash'); }; const VIEW_SUFFIX = ' (v)'; @@ -14,48 +14,48 @@ const removeViewNameSuffix = name => name.slice(0, -VIEW_SUFFIX.length); const setViewSuffix = name => `${name}${VIEW_SUFFIX}`; const generateCreateViewScript = (viewName, viewData, viewDefinitionFallback = {}) => { - const selectStatement = _.trim(viewData.view_definition || viewDefinitionFallback.definition || ''); + const selectStatement = _.trim(viewData.view_definition || viewDefinitionFallback.definition || ''); - if (!selectStatement) { - return '' - } + if (!selectStatement) { + return ''; + } - return `CREATE VIEW ${viewName} AS ${selectStatement}`; + return `CREATE VIEW ${viewName} AS ${selectStatement}`; }; const prepareViewData = (viewData, viewOptions) => { - const data = { - withCheckOption: viewData.check_option !== 'NONE' || _.isNil(viewData.check_option), - checkTestingScope: getCheckTestingScope(viewData.check_option), - viewOptions: 
_.fromPairs(_.map(viewOptions?.view_options, splitByEqualitySymbol)), - temporary: viewOptions?.persistence === 't', - recursive: isViewRecursive(viewData), - description: viewOptions?.description, - }; - - return clearEmptyPropertiesInObject(data); + const data = { + withCheckOption: viewData.check_option !== 'NONE' || _.isNil(viewData.check_option), + checkTestingScope: getCheckTestingScope(viewData.check_option), + viewOptions: _.fromPairs(_.map(viewOptions?.view_options, splitByEqualitySymbol)), + temporary: viewOptions?.persistence === 't', + recursive: isViewRecursive(viewData), + description: viewOptions?.description, + }; + + return clearEmptyPropertiesInObject(data); }; const getCheckTestingScope = check_option => { - if (check_option === 'NONE') { - return ''; - } + if (check_option === 'NONE') { + return ''; + } - return check_option; + return check_option; }; const isViewRecursive = viewData => { - return _.startsWith(_.trim(viewData.view_definition), 'WITH RECURSIVE'); + return _.startsWith(_.trim(viewData.view_definition), 'WITH RECURSIVE'); }; const splitByEqualitySymbol = item => _.split(item, '='); module.exports = { - setDependencies, - isViewByTableType, - isViewByName, - removeViewNameSuffix, - generateCreateViewScript, - setViewSuffix, - prepareViewData, + setDependencies, + isViewByTableType, + isViewByName, + removeViewNameSuffix, + generateCreateViewScript, + setViewSuffix, + prepareViewData, }; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 9561555..cdb49d3 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -2,364 +2,402 @@ const { createClient } = require('./connectionHelper'); const db = require('./db'); const { getJsonSchema } = require('./getJsonSchema'); const { - setDependencies: setDependenciesInColumnHelper, - mapColumnData, - setSubtypeFromSampledJsonValues, + setDependencies: setDependenciesInColumnHelper, + mapColumnData, + setSubtypeFromSampledJsonValues, } = require('./postgresHelpers/columnHelper'); const { - setDependencies: setDependenciesInCommonHelper, - clearEmptyPropertiesInObject, + setDependencies: setDependenciesInCommonHelper, + clearEmptyPropertiesInObject, } = require('./postgresHelpers/common'); const { - setDependencies: setDependenciesInForeignKeysHelper, - prepareForeignKeys, + setDependencies: setDependenciesInForeignKeysHelper, + prepareForeignKeys, } = require('./postgresHelpers/foreignKeysHelper'); const { - setDependencies: setFunctionHelperDependencies, - mapFunctionData, - mapProcedureData, + setDependencies: setFunctionHelperDependencies, + mapFunctionData, + mapProcedureData, } = require('./postgresHelpers/functionHelper'); const { - setDependencies: setDependenciesInTableHelper, - prepareTablePartition, - checkHaveJsonTypes, - prepareTableConstraints, - getLimit, - prepareTableLevelData, - prepareTableIndexes, - prepareTableInheritance, + setDependencies: setDependenciesInTableHelper, + prepareTablePartition, + checkHaveJsonTypes, + prepareTableConstraints, + getLimit, + prepareTableLevelData, + prepareTableIndexes, + prepareTableInheritance, } = require('./postgresHelpers/tableHelper'); const { - setDependencies: setDependenciesInUserDefinedTypesHelper, - getUserDefinedTypes, - isTypeComposite, + setDependencies: setDependenciesInUserDefinedTypesHelper, + getUserDefinedTypes, + isTypeComposite, } = require('./postgresHelpers/userDefinedTypesHelper'); const { - setDependencies: 
setViewDependenciesInViewHelper, - isViewByTableType, - isViewByName, - removeViewNameSuffix, - generateCreateViewScript, - setViewSuffix, - prepareViewData, + setDependencies: setViewDependenciesInViewHelper, + isViewByTableType, + isViewByName, + removeViewNameSuffix, + generateCreateViewScript, + setViewSuffix, + prepareViewData, } = require('./postgresHelpers/viewHelper'); const queryConstants = require('./queryConstants'); let currentSshTunnel = null; let _ = null; let logger = null; +let version = 14; module.exports = { - setDependencies(app) { - _ = app.require('lodash'); - setDependenciesInCommonHelper(app); - setDependenciesInTableHelper(app); - setDependenciesInColumnHelper(app); - setDependenciesInForeignKeysHelper(app); - setViewDependenciesInViewHelper(app); - setFunctionHelperDependencies(app); - setDependenciesInUserDefinedTypesHelper(app); - }, - - async connect(connectionInfo, specificLogger) { - if (db.isClientInitialized()) { - await this.disconnect(); - } - - const { client, sshTunnel } = await createClient(connectionInfo, specificLogger); - - db.initializeClient(client, specificLogger); - currentSshTunnel = sshTunnel; - logger = specificLogger; - }, - - async disconnect() { - if (currentSshTunnel) { - currentSshTunnel.close(); - currentSshTunnel = null; - } - - await db.releaseClient(); - }, - - pingDb() { - return db.query(queryConstants.PING); - }, - - applyScript(script) { - return db.query(script); - }, - - async getDatabaseNames() { - return _.map(await db.query(queryConstants.GET_DATABASES), 'database_name'); - }, - - async logVersion() { - const versionRow = await db.queryTolerant(queryConstants.GET_VERSION, [], true); - const version = versionRow?.version || 'Version not retrieved'; - - logger.info(`PostgreSQL version: ${version}`); - }, - - async getAllSchemasNames() { - const schemaNames = await db.query(queryConstants.GET_SCHEMA_NAMES); - - return schemaNames.map(({ schema_name }) => schema_name).filter(schemaName => !isSystemSchema(schemaName)); - }, - - async getTablesNames(schemaName) { - const tables = await db.query(queryConstants.GET_TABLE_NAMES, [schemaName]); - - const tableTypesToExclude = ['FOREIGN TABLE']; - - return tables - .filter(({ table_type }) => !_.includes(tableTypesToExclude, table_type)) - .map(({ table_name, table_type }) => { - if (isViewByTableType(table_type)) { - return setViewSuffix(table_name); - } else { - return table_name; - } - }); - }, - - async getDbLevelData() { - logger.progress('Get database data'); - - const database_name = (await db.queryTolerant(queryConstants.GET_DB_NAME, [], true))?.current_database; - const encoding = (await db.queryTolerant(queryConstants.GET_DB_ENCODING, [], true))?.server_encoding; - const LC_COLLATE = (await db.queryTolerant(queryConstants.GET_DB_COLLATE_NAME, [], true))?.default_collate_name; - - return clearEmptyPropertiesInObject({ - database_name, - encoding, - LC_COLLATE, - LC_CTYPE: LC_COLLATE, - }); - }, - - async retrieveEntitiesData(schemaName, entitiesNames, recordSamplingSettings) { - const userDefinedTypes = await this._retrieveUserDefinedTypes(schemaName); - const schemaOidResult = await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true); - const schemaOid = schemaOidResult?.oid; - - const [viewsNames, tablesNames] = _.partition(entitiesNames, isViewByName); - - const tables = await mapPromises( - tablesNames, - _.bind(this._retrieveSingleTableData, this, recordSamplingSettings, schemaOid, schemaName, userDefinedTypes) - ); - - const views = await 
mapPromises(viewsNames, _.bind(this._retrieveSingleViewData, this, schemaOid, schemaName)); - - return { views, tables, modelDefinitions: getJsonSchema(userDefinedTypes) }; - }, - - async retrieveFunctionsWithProcedures(schemaName) { - logger.progress('Get Functions and Procedures', schemaName); - - const schemaOid = (await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true))?.oid; - - const functionsWithProcedures = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES, [ - schemaName, - ]); - const functionAdditionalData = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL, [ - schemaOid, - ]); - const [functions, procedures] = _.partition(_.filter(functionsWithProcedures, 'routine_type'), { - routine_type: 'FUNCTION', - }); - - const userDefinedFunctions = await mapPromises(functions, async functionData => { - const functionArgs = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ARGS, [ - functionData.specific_name, - ]); - const additionalData = _.find(functionAdditionalData, { function_name: functionData.name }); - - return mapFunctionData(functionData, functionArgs, additionalData); - }); - - const userDefinedProcedures = await mapPromises(procedures, async functionData => { - const functionArgs = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ARGS, [ - functionData.specific_name, - ]); - const additionalData = _.find(functionAdditionalData, { function_name: functionData.name }); - - return mapProcedureData(functionData, functionArgs, additionalData); - }); - - return { functions: userDefinedFunctions, procedures: userDefinedProcedures }; - }, - - async _retrieveUserDefinedTypes(schemaName) { - logger.progress('Get User-Defined Types', schemaName); - - const userDefinedTypes = await db.queryTolerant(queryConstants.GET_USER_DEFINED_TYPES, [schemaName]); - const domainTypes = await db.queryTolerant(queryConstants.GET_DOMAIN_TYPES, [schemaName]); - - const udtsWithColumns = await mapPromises(userDefinedTypes, async typeData => { - if (isTypeComposite(typeData)) { - return { - ...typeData, - columns: await db.queryTolerant(queryConstants.GET_COMPOSITE_TYPE_COLUMNS, [typeData.pg_class_oid]), - }; - } - - return typeData; - }); - - const domainTypesWithConstraints = await mapPromises(domainTypes, async typeData => { - return { - ...typeData, - constraints: await db.queryTolerant(queryConstants.GET_DOMAIN_TYPES_CONSTRAINTS, [ - typeData.domain_name, - schemaName, - ]), - }; - }); - - return getUserDefinedTypes(udtsWithColumns, domainTypesWithConstraints); - }, - - async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, userDefinedTypes, tableName) { - logger.progress('Get table data', schemaName, tableName); - - const tableLevelData = await db.queryTolerant( - queryConstants.GET_TABLE_LEVEL_DATA, - [tableName, schemaOid], - true - ); - const tableOid = tableLevelData?.oid; - - const tableToastOptions = await db.queryTolerant( - queryConstants.GET_TABLE_TOAST_OPTIONS, - [tableName, schemaOid], - true - ); - const partitionResult = await db.queryTolerant(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid], true); - const tableColumns = await this._getTableColumns(tableName, schemaName, tableOid); - const descriptionResult = await db.queryTolerant(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid], true); - const inheritsResult = await db.queryTolerant(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid]); - const tableConstraintsResult = await 
db.queryTolerant(queryConstants.GET_TABLE_CONSTRAINTS, [tableOid]); - const tableIndexesResult = await db.queryTolerant(queryConstants.GET_TABLE_INDEXES, [tableOid]); - const tableForeignKeys = await db.queryTolerant(queryConstants.GET_TABLE_FOREIGN_KEYS, [tableOid]); - - const partitioning = prepareTablePartition(partitionResult, tableColumns); - const tableLevelProperties = prepareTableLevelData(tableLevelData, tableToastOptions); - const description = getDescriptionFromResult(descriptionResult); - const inherits = prepareTableInheritance(schemaName, inheritsResult); - const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableColumns); - const tableIndexes = prepareTableIndexes(tableIndexesResult); - const relationships = prepareForeignKeys(tableForeignKeys, tableName, schemaName, tableColumns); - - const tableData = { - partitioning, - description, - inherits, - Indxs: tableIndexes, - ...tableLevelProperties, - ...tableConstraint, - }; - - const entityLevel = clearEmptyPropertiesInObject(tableData); - - let targetAttributes = tableColumns.map(mapColumnData(userDefinedTypes)); - - const hasJsonTypes = checkHaveJsonTypes(targetAttributes); - let documents = []; - - if (hasJsonTypes) { - documents = await this._getDocuments(schemaName, tableName, targetAttributes, recordSamplingSettings); - targetAttributes = setSubtypeFromSampledJsonValues(targetAttributes, documents); - } - - return { - name: tableName, - entityLevel, - jsonSchema: getJsonSchema(targetAttributes), - documents, - relationships, - }; - }, - - async _getTableColumns(tableName, schemaName, tableOid) { - logger.progress('Get columns', schemaName, tableName); - - const tableColumns = await db.query(queryConstants.GET_TABLE_COLUMNS, [tableName, schemaName]); - const tableColumnsAdditionalData = await db.queryTolerant(queryConstants.GET_TABLE_COLUMNS_ADDITIONAL_DATA, [ - tableOid, - ]); - - return _.map(tableColumns, (columnData, index) => { - return { - ...columnData, - ...(_.find(tableColumnsAdditionalData, { name: columnData.column_name }) || {}), - }; - }); - }, - - async _getDocuments(schemaName, tableName, attributes, recordSamplingSettings) { - logger.progress('Sampling table', schemaName, tableName); - - const fullTableName = `${schemaName}.${tableName}`; - const quantity = - (await db.queryTolerant(queryConstants.GET_ROWS_COUNT(fullTableName), [], true))?.quantity || 0; - const limit = getLimit(quantity, recordSamplingSettings); - - const jsonColumns = _.chain(attributes) - .filter(({ type }) => _.includes(['json', 'jsonb'], type)) - .map('name') - .join(', ') - .value(); - - return await db.queryTolerant(queryConstants.GET_SAMPLED_DATA(fullTableName, jsonColumns), [limit]); - }, - - async _retrieveSingleViewData(schemaOid, schemaName, viewName) { - logger.progress('Get view data', schemaName, viewName); - - viewName = removeViewNameSuffix(viewName); - - const viewData = await db.query(queryConstants.GET_VIEW_DATA, [viewName, schemaName], true); - const viewDefinitionFallback = - !viewData.view_definition && - (await db.queryTolerant(queryConstants.GET_VIEW_SELECT_STMT_FALLBACK, [viewName, schemaName], true)); - const viewOptions = await db.queryTolerant(queryConstants.GET_VIEW_OPTIONS, [viewName, schemaOid], true); - - const script = generateCreateViewScript(viewName, viewData, viewDefinitionFallback); - const data = prepareViewData(viewData, viewOptions); - - if (!script) { - logger.info('View select statement was not retrieved', { schemaName, viewName }); - - return { - name: viewName, - data, - 
jsonSchema: { properties: [] }, - }; - } - - return { - name: viewName, - data, - ddl: { - script, - type: 'postgres', - }, - }; - }, + setDependencies(app) { + _ = app.require('lodash'); + setDependenciesInCommonHelper(app); + setDependenciesInTableHelper(app); + setDependenciesInColumnHelper(app); + setDependenciesInForeignKeysHelper(app); + setViewDependenciesInViewHelper(app); + setFunctionHelperDependencies(app); + setDependenciesInUserDefinedTypesHelper(app); + }, + + async connect(connectionInfo, specificLogger) { + if (db.isClientInitialized()) { + await this.disconnect(); + } + + const { client, sshTunnel } = await createClient(connectionInfo, specificLogger); + + db.initializeClient(client, specificLogger); + currentSshTunnel = sshTunnel; + logger = specificLogger; + version = await this._getServerVersion(); + }, + + async disconnect() { + if (currentSshTunnel) { + currentSshTunnel.close(); + currentSshTunnel = null; + } + + await db.releaseClient(); + }, + + pingDb() { + return db.query(queryConstants.PING); + }, + + applyScript(script) { + return db.query(script); + }, + + async getDatabaseNames() { + return _.map(await db.query(queryConstants.GET_DATABASES), 'database_name'); + }, + + async logVersion() { + const versionRow = await db.queryTolerant(queryConstants.GET_VERSION, [], true); + const version = versionRow?.version || 'Version not retrieved'; + + logger.info(`PostgreSQL version: ${version}`); + }, + + async getAllSchemasNames() { + const schemaNames = await db.query(queryConstants.GET_SCHEMA_NAMES); + + return schemaNames.map(({ schema_name }) => schema_name).filter(schemaName => !isSystemSchema(schemaName)); + }, + + async getTablesNames(schemaName) { + const tables = await db.query(queryConstants.GET_TABLE_NAMES, [schemaName]); + + const tableTypesToExclude = ['FOREIGN TABLE']; + + return tables + .filter(({ table_type }) => !_.includes(tableTypesToExclude, table_type)) + .map(({ table_name, table_type }) => { + if (isViewByTableType(table_type)) { + return setViewSuffix(table_name); + } else { + return table_name; + } + }); + }, + + async getDbLevelData() { + logger.progress('Get database data'); + + const database_name = (await db.queryTolerant(queryConstants.GET_DB_NAME, [], true))?.current_database; + const encoding = (await db.queryTolerant(queryConstants.GET_DB_ENCODING, [], true))?.server_encoding; + const LC_COLLATE = (await db.queryTolerant(queryConstants.GET_DB_COLLATE_NAME, [], true))?.default_collate_name; + + return clearEmptyPropertiesInObject({ + database_name, + encoding, + LC_COLLATE, + LC_CTYPE: LC_COLLATE, + }); + }, + + async retrieveEntitiesData(schemaName, entitiesNames, recordSamplingSettings) { + const userDefinedTypes = await this._retrieveUserDefinedTypes(schemaName); + const schemaOidResult = await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true); + const schemaOid = schemaOidResult?.oid; + + const [viewsNames, tablesNames] = _.partition(entitiesNames, isViewByName); + + const tables = await mapPromises( + tablesNames, + _.bind( + this._retrieveSingleTableData, + this, + recordSamplingSettings, + schemaOid, + schemaName, + userDefinedTypes, + ), + ); + + const views = await mapPromises(viewsNames, _.bind(this._retrieveSingleViewData, this, schemaOid, schemaName)); + + return { views, tables, modelDefinitions: getJsonSchema(userDefinedTypes) }; + }, + + async retrieveFunctionsWithProcedures(schemaName) { + logger.progress('Get Functions and Procedures', schemaName); + + const schemaOid = (await 
db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true))?.oid; + + const functionsWithProcedures = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES, [ + schemaName, + ]); + const functionAdditionalData = await db.queryTolerant(getGetFunctionsAdditionalDataQuery(version), [ + schemaOid, + ]); + const [functions, procedures] = _.partition(_.filter(functionsWithProcedures, 'routine_type'), { + routine_type: 'FUNCTION', + }); + + const userDefinedFunctions = await mapPromises(functions, async functionData => { + const functionArgs = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ARGS, [ + functionData.specific_name, + ]); + const additionalData = _.find(functionAdditionalData, { function_name: functionData.name }); + + return mapFunctionData(functionData, functionArgs, additionalData); + }); + + const userDefinedProcedures = await mapPromises(procedures, async functionData => { + const functionArgs = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ARGS, [ + functionData.specific_name, + ]); + const additionalData = _.find(functionAdditionalData, { function_name: functionData.name }); + + return mapProcedureData(functionData, functionArgs, additionalData); + }); + + return { functions: userDefinedFunctions, procedures: userDefinedProcedures }; + }, + + async _retrieveUserDefinedTypes(schemaName) { + logger.progress('Get User-Defined Types', schemaName); + + const userDefinedTypes = await db.queryTolerant(queryConstants.GET_USER_DEFINED_TYPES, [schemaName]); + const domainTypes = await db.queryTolerant(queryConstants.GET_DOMAIN_TYPES, [schemaName]); + + const udtsWithColumns = await mapPromises(userDefinedTypes, async typeData => { + if (isTypeComposite(typeData)) { + return { + ...typeData, + columns: await db.queryTolerant(queryConstants.GET_COMPOSITE_TYPE_COLUMNS, [typeData.pg_class_oid]), + }; + } + + return typeData; + }); + + const domainTypesWithConstraints = await mapPromises(domainTypes, async typeData => { + return { + ...typeData, + constraints: await db.queryTolerant(queryConstants.GET_DOMAIN_TYPES_CONSTRAINTS, [ + typeData.domain_name, + schemaName, + ]), + }; + }); + + return getUserDefinedTypes(udtsWithColumns, domainTypesWithConstraints); + }, + + async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, userDefinedTypes, tableName) { + logger.progress('Get table data', schemaName, tableName); + + const tableLevelData = await db.queryTolerant( + queryConstants.GET_TABLE_LEVEL_DATA, + [tableName, schemaOid], + true, + ); + const tableOid = tableLevelData?.oid; + + const tableToastOptions = await db.queryTolerant( + queryConstants.GET_TABLE_TOAST_OPTIONS, + [tableName, schemaOid], + true, + ); + const partitionResult = await db.queryTolerant(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid], true); + const tableColumns = await this._getTableColumns(tableName, schemaName, tableOid); + const descriptionResult = await db.queryTolerant(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid], true); + const inheritsResult = await db.queryTolerant(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid]); + const tableConstraintsResult = await db.queryTolerant(queryConstants.GET_TABLE_CONSTRAINTS, [tableOid]); + const tableIndexesResult = await db.queryTolerant(getGetIndexesQuery(version), [tableOid]); + const tableForeignKeys = await db.queryTolerant(queryConstants.GET_TABLE_FOREIGN_KEYS, [tableOid]); + + const partitioning = prepareTablePartition(partitionResult, tableColumns); + const 
tableLevelProperties = prepareTableLevelData(tableLevelData, tableToastOptions); + const description = getDescriptionFromResult(descriptionResult); + const inherits = prepareTableInheritance(schemaName, inheritsResult); + const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableColumns); + const tableIndexes = prepareTableIndexes(tableIndexesResult); + const relationships = prepareForeignKeys(tableForeignKeys, tableName, schemaName, tableColumns); + + const tableData = { + partitioning, + description, + inherits, + Indxs: tableIndexes, + ...tableLevelProperties, + ...tableConstraint, + }; + + const entityLevel = clearEmptyPropertiesInObject(tableData); + + let targetAttributes = tableColumns.map(mapColumnData(userDefinedTypes)); + + const hasJsonTypes = checkHaveJsonTypes(targetAttributes); + let documents = []; + + if (hasJsonTypes) { + documents = await this._getDocuments(schemaName, tableName, targetAttributes, recordSamplingSettings); + targetAttributes = setSubtypeFromSampledJsonValues(targetAttributes, documents); + } + + return { + name: tableName, + entityLevel, + jsonSchema: getJsonSchema(targetAttributes), + documents, + relationships, + }; + }, + + async _getTableColumns(tableName, schemaName, tableOid) { + logger.progress('Get columns', schemaName, tableName); + + const tableColumns = await db.query(queryConstants.GET_TABLE_COLUMNS, [tableName, schemaName]); + const tableColumnsAdditionalData = await db.queryTolerant(queryConstants.GET_TABLE_COLUMNS_ADDITIONAL_DATA, [ + tableOid, + ]); + + return _.map(tableColumns, (columnData, index) => { + return { + ...columnData, + ...(_.find(tableColumnsAdditionalData, { name: columnData.column_name }) || {}), + }; + }); + }, + + async _getDocuments(schemaName, tableName, attributes, recordSamplingSettings) { + logger.progress('Sampling table', schemaName, tableName); + + const fullTableName = `${schemaName}.${tableName}`; + const quantity = + (await db.queryTolerant(queryConstants.GET_ROWS_COUNT(fullTableName), [], true))?.quantity || 0; + const limit = getLimit(quantity, recordSamplingSettings); + + const jsonColumns = _.chain(attributes) + .filter(({ type }) => _.includes(['json', 'jsonb'], type)) + .map('name') + .join(', ') + .value(); + + return await db.queryTolerant(queryConstants.GET_SAMPLED_DATA(fullTableName, jsonColumns), [limit]); + }, + + async _retrieveSingleViewData(schemaOid, schemaName, viewName) { + logger.progress('Get view data', schemaName, viewName); + + viewName = removeViewNameSuffix(viewName); + + const viewData = await db.query(queryConstants.GET_VIEW_DATA, [viewName, schemaName], true); + const viewDefinitionFallback = + !viewData.view_definition && + (await db.queryTolerant(queryConstants.GET_VIEW_SELECT_STMT_FALLBACK, [viewName, schemaName], true)); + const viewOptions = await db.queryTolerant(queryConstants.GET_VIEW_OPTIONS, [viewName, schemaOid], true); + + const script = generateCreateViewScript(viewName, viewData, viewDefinitionFallback); + const data = prepareViewData(viewData, viewOptions); + + if (!script) { + logger.info('View select statement was not retrieved', { schemaName, viewName }); + + return { + name: viewName, + data, + jsonSchema: { properties: [] }, + }; + } + + return { + name: viewName, + data, + ddl: { + script, + type: 'postgres', + }, + }; + }, + + async _getServerVersion() { + const result = await db.queryTolerant(queryConstants.GET_VERSION_AS_NUM, [], true); + const serverVersionNum = _.toNumber(result?.server_version_num); + + if (serverVersionNum >= 100000 && 
serverVersionNum < 110000) {
+			return 10;
+		} else if (serverVersionNum >= 110000 && serverVersionNum < 120000) {
+			return 11;
+		} else if (serverVersionNum >= 120000 && serverVersionNum < 130000) {
+			return 12;
+		} else if (serverVersionNum >= 130000 && serverVersionNum < 140000) {
+			return 13;
+		} else if (serverVersionNum >= 140000 && serverVersionNum < 150000) {
+			return 14;
+		}
+
+		return 14;
+	},
 };
 
 const isSystemSchema = schema_name => {
-	if (_.startsWith(schema_name, 'pg_')) {
-		return true;
-	}
+	if (_.startsWith(schema_name, 'pg_')) {
+		return true;
+	}
 
-	if (_.includes(['information_schema'], schema_name)) {
-		return true;
-	}
+	if (_.includes(['information_schema'], schema_name)) {
+		return true;
+	}
 
-	return false;
+	return false;
+};
+
+const getGetIndexesQuery = postgresVersion => {
+	return postgresVersion === 10 ? queryConstants.GET_TABLE_INDEXES_V_10 : queryConstants.GET_TABLE_INDEXES;
+};
+
+const getGetFunctionsAdditionalDataQuery = postgresVersion => {
+	return postgresVersion === 10
+		? queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL_V_10
+		: queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL;
 };
 
 const getDescriptionFromResult = result => result?.obj_description;
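// Editor's note (illustrative sketch, not part of the patch): PostgreSQL
// reports server_version_num as MAJOR * 10000 + MINOR from version 10 on
// (100019 -> 10.19, 140001 -> 14.1), which is what the range checks in
// _getServerVersion above decode into a major version. The gate matters
// because PostgreSQL 11 introduced covering indexes (pg_index.indnkeyatts)
// and pg_proc.prokind, so the *_V_10 query variants fall back to indnatts
// and proiswindow. How the helpers compose, reusing db.queryTolerant as it
// is called elsewhere in this file (tableOid is a stand-in value):
//
//   const indexesQuery = getGetIndexesQuery(version); // GET_TABLE_INDEXES_V_10 on PostgreSQL 10
//   const tableIndexesResult = await db.queryTolerant(indexesQuery, [tableOid]);

diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js
index 6117786..3470c2c 100644
--- a/reverse_engineering/helpers/queryConstants.js
+++ b/reverse_engineering/helpers/queryConstants.js
@@ -1,70 +1,5 @@
-const queryConstants = {
-	PING: 'SELECT schema_name FROM information_schema.schemata LIMIT 1;',
-	GET_VERSION: 'SELECT version()',
-	GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;',
-	GET_TABLE_NAMES: `
-		SELECT table_name, table_type
-		FROM information_schema.tables
-		WHERE table_schema = $1
-		ORDER BY table_name;`,
-	GET_NAMESPACE_OID: 'SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = $1',
-	GET_TABLE_LEVEL_DATA: `
-		SELECT pc.oid, pc.relpersistence, pc.reloptions, pt.spcname
-		FROM pg_catalog.pg_class AS pc
-		LEFT JOIN pg_catalog.pg_tablespace AS pt
-		ON pc.reltablespace = pt.oid
-		WHERE pc.relname = $1 AND pc.relnamespace = $2;`,
-	GET_TABLE_TOAST_OPTIONS: `
-		SELECT reloptions AS toast_options
-		FROM pg_catalog.pg_class
-		WHERE oid =
-			(SELECT reltoastrelid
-			FROM pg_catalog.pg_class
-			WHERE relname=$1 AND relnamespace = $2
-			LIMIT 1)`,
-	GET_TABLE_PARTITION_DATA: `
-		SELECT partstrat as partition_method,
-			partattrs::int2[] as partition_attributes_positions,
-			pg_catalog.pg_get_expr(partexprs, partrelid) AS expressions
-		FROM pg_catalog.pg_partitioned_table
-		WHERE partrelid = $1;`,
-	GET_TABLE_COLUMNS: `
-		SELECT * FROM information_schema.columns
-		WHERE table_name = $1 AND table_schema = $2
-		ORDER BY ordinal_position`,
-	GET_TABLE_COLUMNS_ADDITIONAL_DATA: `
-		SELECT pg_attribute.attname AS name,
-			pg_attribute.attndims AS number_of_array_dimensions,
-			pg_description.description,
-			pg_attribute.atttypmod AS attribute_mode
-		FROM pg_catalog.pg_attribute AS pg_attribute
-		LEFT JOIN pg_catalog.pg_description AS pg_description ON (pg_description.objsubid=pg_attribute.attnum
-			AND pg_description.objoid = pg_attribute.attrelid)
-		WHERE pg_attribute.attrelid = $1;`,
-	GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`,
-	GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) AS quantity FROM ${fullTableName};`,
-	GET_SAMPLED_DATA: (fullTableName, jsonColumns) => `SELECT ${jsonColumns} FROM ${fullTableName} LIMIT $1;`,
-	GET_INHERITS_PARENT_TABLE_NAME: `
-		SELECT pc.relname AS parent_table_name FROM 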
pg_catalog.pg_inherits AS pi - INNER JOIN pg_catalog.pg_class AS pc - ON pc.oid = pi.inhparent - WHERE pi.inhrelid = $1;`, - GET_TABLE_CONSTRAINTS: ` - SELECT pcon.conname AS constraint_name, - pcon.contype AS constraint_type, - pcon.connoinherit AS no_inherit, - pcon.conkey AS constraint_keys, - pg_catalog.pg_get_expr(pcon.conbin, pcon.conrelid) AS expression, - obj_description(pcon.oid, 'pg_constraint') AS description, - pc.reloptions AS storage_parameters, - pt.spcname AS tablespace - FROM pg_catalog.pg_constraint AS pcon - LEFT JOIN pg_catalog.pg_class AS pc - ON pcon.conindid = pc.oid - LEFT JOIN pg_catalog.pg_tablespace AS pt - ON pc.reltablespace = pt.oid - WHERE pcon.conrelid = $1;`, - GET_TABLE_INDEXES: ` +const getGET_TABLE_INDEXES = postgresVersion => { + return ` SELECT indexname, index_method, index_unique, @@ -82,8 +17,8 @@ const queryConstants = { ORDER BY ord) AS ascendings, array_agg(nulls_first ORDER BY ord) AS nulls_first, - reloptions AS storage_parameters, - tablespace_name + reloptions AS storage_parameters, + tablespace_name FROM (SELECT ct.oid AS table_oid, c.relname AS indexname, @@ -91,9 +26,9 @@ const queryConstants = { indexes.indisunique AS index_unique, indexes.ord, attribute.attname, - c.reloptions, - tablespace_t.spcname AS tablespace_name, - indexes.indnkeyatts AS number_of_keys, + c.reloptions, + tablespace_t.spcname AS tablespace_name, + indexes.${postgresVersion === 10 ? 'indnatts' : 'indnkeyatts'} AS number_of_keys, pg_catalog.pg_get_expr(indpred, indrelid) AS where_expression, CASE WHEN collation_namespace.nspname is not null THEN format('%I.%I',collation_namespace.nspname,collation_t.collname) @@ -125,21 +60,93 @@ const queryConstants = { LEFT JOIN pg_catalog.pg_namespace collation_namespace ON (collation_namespace.oid=collation_t.collnamespace) LEFT JOIN pg_catalog.pg_opclass opclass_t ON (opclass_t.oid=indexes.class) LEFT JOIN pg_catalog.pg_namespace opclas_namespace ON (opclas_namespace.oid=opclass_t.opcnamespace) - LEFT JOIN pg_catalog.pg_tablespace tablespace_t ON (tablespace_t.oid = c.reltablespace)) s2 + LEFT JOIN pg_catalog.pg_tablespace tablespace_t ON (tablespace_t.oid = c.reltablespace)) s2 WHERE table_oid = $1 GROUP BY indexname, index_method, index_unique, - reloptions, + reloptions, number_of_keys, where_expression, - tablespace_name;`, - GET_TABLE_FOREIGN_KEYS: ` + tablespace_name; + `; +}; + +const queryConstants = { + PING: 'SELECT schema_name FROM information_schema.schemata LIMIT 1;', + GET_VERSION: 'SELECT version()', + GET_VERSION_AS_NUM: 'SHOW server_version_num;', + GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;', + GET_TABLE_NAMES: ` + SELECT table_name, table_type + FROM information_schema.tables + WHERE table_schema = $1 + ORDER BY table_name;`, + GET_NAMESPACE_OID: 'SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = $1', + GET_TABLE_LEVEL_DATA: ` + SELECT pc.oid, pc.relpersistence, pc.reloptions, pt.spcname + FROM pg_catalog.pg_class AS pc + LEFT JOIN pg_catalog.pg_tablespace AS pt + ON pc.reltablespace = pt.oid + WHERE pc.relname = $1 AND pc.relnamespace = $2;`, + GET_TABLE_TOAST_OPTIONS: ` + SELECT reloptions AS toast_options + FROM pg_catalog.pg_class + WHERE oid = + (SELECT reltoastrelid + FROM pg_catalog.pg_class + WHERE relname=$1 AND relnamespace = $2 + LIMIT 1)`, + GET_TABLE_PARTITION_DATA: ` + SELECT partstrat as partition_method, + partattrs::int2[] as partition_attributes_positions, + pg_catalog.pg_get_expr(partexprs, partrelid) AS expressions + FROM pg_catalog.pg_partitioned_table + 
WHERE partrelid = $1;`, + GET_TABLE_COLUMNS: ` + SELECT * FROM information_schema.columns + WHERE table_name = $1 AND table_schema = $2 + ORDER BY ordinal_position`, + GET_TABLE_COLUMNS_ADDITIONAL_DATA: ` + SELECT pg_attribute.attname AS name, + pg_attribute.attndims AS number_of_array_dimensions, + pg_description.description, + pg_attribute.atttypmod AS attribute_mode + FROM pg_catalog.pg_attribute AS pg_attribute + LEFT JOIN pg_catalog.pg_description AS pg_description ON (pg_description.objsubid=pg_attribute.attnum + AND pg_description.objoid = pg_attribute.attrelid) + WHERE pg_attribute.attrelid = $1;`, + GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`, + GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) AS quantity FROM ${fullTableName};`, + GET_SAMPLED_DATA: (fullTableName, jsonColumns) => `SELECT ${jsonColumns} FROM ${fullTableName} LIMIT $1;`, + GET_INHERITS_PARENT_TABLE_NAME: ` + SELECT pc.relname AS parent_table_name FROM pg_catalog.pg_inherits AS pi + INNER JOIN pg_catalog.pg_class AS pc + ON pc.oid = pi.inhparent + WHERE pi.inhrelid = $1;`, + GET_TABLE_CONSTRAINTS: ` + SELECT pcon.conname AS constraint_name, + pcon.contype AS constraint_type, + pcon.connoinherit AS no_inherit, + pcon.conkey AS constraint_keys, + pg_catalog.pg_get_expr(pcon.conbin, pcon.conrelid) AS expression, + obj_description(pcon.oid, 'pg_constraint') AS description, + pc.reloptions AS storage_parameters, + pt.spcname AS tablespace + FROM pg_catalog.pg_constraint AS pcon + LEFT JOIN pg_catalog.pg_class AS pc + ON pcon.conindid = pc.oid + LEFT JOIN pg_catalog.pg_tablespace AS pt + ON pc.reltablespace = pt.oid + WHERE pcon.conrelid = $1;`, + GET_TABLE_INDEXES: getGET_TABLE_INDEXES(), + GET_TABLE_INDEXES_V_10: getGET_TABLE_INDEXES(10), + GET_TABLE_FOREIGN_KEYS: ` SELECT pcon.conname AS relationship_name, pcon.conkey AS table_columns_positions, pc_foreign_table.relname AS foreign_table_name, ARRAY( - SELECT column_name FROM unnest(pcon.confkey) AS column_position + SELECT column_name::text FROM unnest(pcon.confkey) AS column_position JOIN information_schema.columns ON (ordinal_position = column_position) WHERE table_name = pc_foreign_table.relname AND table_schema = foreign_table_namespace.nspname)::text[] AS foreign_columns, foreign_table_namespace.nspname AS foreign_table_schema @@ -149,15 +156,15 @@ const queryConstants = { LEFT JOIN pg_catalog.pg_class AS pc_foreign_table ON (pcon.confrelid = pc_foreign_table.oid) JOIN pg_catalog.pg_namespace AS foreign_table_namespace ON (pc_foreign_table.relnamespace = foreign_table_namespace.oid) WHERE pcon.conrelid = $1 AND pcon.contype = 'f';`, - GET_VIEW_DATA: `SELECT * FROM information_schema.views WHERE table_name = $1 AND table_schema = $2;`, - GET_VIEW_SELECT_STMT_FALLBACK: `SELECT definition FROM pg_views WHERE viewname = $1 AND schemaname = $2;`, - GET_VIEW_OPTIONS: ` + GET_VIEW_DATA: `SELECT * FROM information_schema.views WHERE table_name = $1 AND table_schema = $2;`, + GET_VIEW_SELECT_STMT_FALLBACK: `SELECT definition FROM pg_views WHERE viewname = $1 AND schemaname = $2;`, + GET_VIEW_OPTIONS: ` SELECT reloptions AS view_options, relpersistence AS persistence, obj_description(oid, 'pg_class') AS description FROM pg_catalog.pg_class WHERE relname = $1 AND relnamespace = $2;`, - GET_FUNCTIONS_WITH_PROCEDURES: ` + GET_FUNCTIONS_WITH_PROCEDURES: ` SELECT specific_name, routine_name AS name, routine_type, @@ -167,7 +174,7 @@ const queryConstants = { type_udt_name AS return_data_type FROM information_schema.routines WHERE specific_schema=$1;`, - 
GET_FUNCTIONS_WITH_PROCEDURES_ARGS: ` + GET_FUNCTIONS_WITH_PROCEDURES_ARGS: ` SELECT parameter_name, parameter_mode, parameter_default, @@ -176,7 +183,7 @@ const queryConstants = { FROM information_schema.parameters WHERE specific_name = $1 ORDER BY ordinal_position;`, - GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL: ` + GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL: ` SELECT obj_description(oid, 'pg_proc') AS description, proname AS function_name, provolatile AS volatility, @@ -188,7 +195,22 @@ const queryConstants = { prorows AS estimated_rows, prokind AS kind FROM pg_catalog.pg_proc WHERE pronamespace = $1;`, - GET_USER_DEFINED_TYPES: ` + GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL_V_10: ` + SELECT obj_description(oid, 'pg_proc') AS description, + proname AS function_name, + provolatile AS volatility, + proparallel AS parallel, + proisstrict AS strict, + proretset AS returns_set, + proleakproof AS leak_proof, + procost AS estimated_cost, + prorows AS estimated_rows, + CASE + WHEN proiswindow is true THEN 'w' + ELSE '' + END AS kind + FROM pg_catalog.pg_proc WHERE pronamespace = $1;`, + GET_USER_DEFINED_TYPES: ` SELECT pg_type.typrelid AS pg_class_oid, pg_type.typname AS name, pg_type.typtype AS type, @@ -222,7 +244,7 @@ const queryConstants = { range_opclass_name, range_canonical_proc, range_diff_proc;`, - GET_COMPOSITE_TYPE_COLUMNS: ` + GET_COMPOSITE_TYPE_COLUMNS: ` SELECT pg_attribute.attname AS column_name, pg_type.typname AS data_type, pg_get_expr(pg_attrdef.adbin, pg_attrdef.adrelid) AS columns_default, @@ -236,11 +258,11 @@ const queryConstants = { AND pg_attrdef.adnum = pg_attribute.attnum) LEFT JOIN pg_catalog.pg_collation AS pg_collation ON (pg_collation.oid = pg_attribute.attcollation) WHERE pg_attribute.attrelid = $1`, - GET_DB_NAME: 'SELECT current_database();', - GET_DB_ENCODING: 'SHOW SERVER_ENCODING;', - GET_DB_COLLATE_NAME: 'SELECT default_collate_name FROM information_schema.character_sets;', - GET_DOMAIN_TYPES: 'SELECT * FROM information_schema.domains WHERE domain_schema = $1', - GET_DOMAIN_TYPES_CONSTRAINTS: ` + GET_DB_NAME: 'SELECT current_database();', + GET_DB_ENCODING: 'SHOW SERVER_ENCODING;', + GET_DB_COLLATE_NAME: 'SELECT default_collate_name FROM information_schema.character_sets;', + GET_DOMAIN_TYPES: 'SELECT * FROM information_schema.domains WHERE domain_schema = $1', + GET_DOMAIN_TYPES_CONSTRAINTS: ` SELECT pg_type.typname AS type_name, pg_type.typnotnull AS not_null, pg_constraint.conname AS constraint_name, @@ -249,18 +271,18 @@ const queryConstants = { LEFT JOIN pg_catalog.pg_constraint AS pg_constraint ON (pg_constraint.contypid = pg_type.oid) LEFT JOIN pg_catalog.pg_namespace AS pg_namespace ON (pg_namespace.oid = pg_type.typnamespace) WHERE pg_type.typname = $1 AND pg_namespace.nspname = $2 AND pg_constraint.contype = 'c';`, - GET_DATABASES: - 'SELECT datname AS database_name FROM pg_catalog.pg_database WHERE datistemplate != TRUE AND datallowconn = TRUE;', + GET_DATABASES: + 'SELECT datname AS database_name FROM pg_catalog.pg_database WHERE datistemplate != TRUE AND datallowconn = TRUE;', }; const getQueryName = query => { - const queryEntry = - Object.entries(queryConstants).find(([queryName, constantQuery]) => query === constantQuery) || []; + const queryEntry = + Object.entries(queryConstants).find(([queryName, constantQuery]) => query === constantQuery) || []; - return queryEntry[0] || 'Custom query'; + return queryEntry[0] || 'Custom query'; }; module.exports = { - getQueryName, - ...queryConstants, + getQueryName, + ...queryConstants, };
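
Editor's addendum (illustrative, not part of the patch): how the reloptions parsing in tableHelper.js behaves. prepareOptions splits each 'key=value' entry with splitByEqualitySymbol, and convertValueToType coerces numeric and boolean strings while leaving anything else (including JSON text) as a plain string. A minimal sketch, assuming lodash is bound to _ and reusing convertValueToType as defined above:

// Typical pg_class.reloptions payload for a table (sample values).
const reloptions = ['fillfactor=70', 'autovacuum_enabled=true', 'toast_tuple_target=8160'];

// Same chain as prepareOptions: split on '=', coerce each value, build an object.
const options = _.chain(reloptions)
	.map(item => _.split(item, '='))
	.map(([key, value]) => [key, convertValueToType(value)])
	.fromPairs()
	.value();

// options -> { fillfactor: 70, autovacuum_enabled: true, toast_tuple_target: 8160 }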
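
A worked example of the sampling arithmetic in getLimit (tableHelper.js), also editor-added: relative sampling takes a percentage of the row count returned by GET_ROWS_COUNT, absolute sampling takes a fixed number, and either result is capped at 100000 before being passed to GET_SAMPLED_DATA as the LIMIT parameter. For a table of 2000000 rows:

// recordSamplingSettings in the shape the plugin receives (sample values).
const recordSamplingSettings = {
	active: 'relative',
	relative: { value: 5 },    // sample 5% of the table
	absolute: { value: 1000 }, // used only when active === 'absolute'
};

const count = 2000000;
const per = recordSamplingSettings.relative.value;
const size =
	recordSamplingSettings.active === 'absolute'
		? recordSamplingSettings.absolute.value
		: Math.round((count / 100) * per); // 100000
const limit = Math.min(size, 100000); // capped at 100000 rows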