diff --git a/.env.example b/.env.example index 9a17dbe8..5733359a 100644 --- a/.env.example +++ b/.env.example @@ -1,8 +1,5 @@ SERVICE_NAME=water-abstraction-import -JWT_TOKEN= -JWT_SECRET= - RETURNS_URI= WATER_URI= CRM_URI= @@ -20,7 +17,6 @@ PROXY= # Airbrake config AIRBRAKE_HOST=https://my-errbit-instance.com AIRBRAKE_KEY=longvaluefullofnumbersandlettersinlowercase -ENVIRONMENT=dev ERRBIT_KEY= ERRBIT_SERVER= @@ -31,23 +27,17 @@ ENVIRONMENT=pre IMPORT_RETURNS_YEARS= NALD_ZIP_PASSWORD= -REDIS_URI= - S3_NALD_IMPORT_PATH= WATER_SERVICE_MAILBOX= +# In processes like licence-details, configure how many 'things' to import within each batch +IMPORT_PROCESS_BATCH_SIZE= + # For development only! See https://eaflood.atlassian.net/browse/WATER-3201 # Enables import of licence agreements during the licence import process. This was a one time import # run in production that we often need to re-run in local and non-production environments IMPORT_LICENCE_AGREEMENTS=false # Set log level for app. Default is 'info' -WRLS_LOG_LEVEL=debug - -# Use Cron type syntax to set timings for these background processes -WRLS_CRON_NALD='0 1 * * *' -WRLS_CRON_LICENCES='0 4 * * 1,2,3,4,5' -WRLS_CRON_RETURN_VERSIONS='0 6 * * 1,2,3,4,5' -WRLS_CRON_MOD_LOGS='0 7 * * 1,2,3,4,5' -WRLS_CRON_TRACKER='0 10 * * 1,2,3,4,5' +WRLS_LOG_LEVEL=info diff --git a/config.js b/config.js index 40b3cc2d..ac69db89 100644 --- a/config.js +++ b/config.js @@ -5,19 +5,12 @@ require('dotenv').config() const environment = process.env.ENVIRONMENT const isProduction = environment === 'prd' -const isTlsConnection = (process.env.REDIS_HOST || '').includes('aws') - module.exports = { blipp: { showAuth: true }, - jwt: { - key: process.env.JWT_SECRET, - verifyOptions: { algorithms: ['HS256'] } - }, - // This config is specifically for hapi-pino which was added to replace the deprecated (and noisy!) hapi/good. At // some point all logging would go through this. 
But for now, it just covers requests & responses log: { @@ -48,12 +41,6 @@ module.exports = { max: 20 }, - pgBoss: { - schema: 'water_import', - application_name: process.env.SERVICE_NAME, - newJobCheckIntervalSeconds: 10 - }, - server: { port: 8007, router: { @@ -80,14 +67,15 @@ module.exports = { proxy: process.env.PROXY, + // In processes like licence-details, configure how many 'things' to import within each batch + processBatchSize: parseInt(process.env.IMPORT_PROCESS_BATCH_SIZE) || 10, + import: { nald: { zipPassword: process.env.NALD_ZIP_PASSWORD, - path: process.env.S3_NALD_IMPORT_PATH || 'wal_nald_data_release', - schedule: process.env.WRLS_CRON_NALD || '0 1 * * *' + path: process.env.S3_NALD_IMPORT_PATH || 'wal_nald_data_release' }, licences: { - schedule: process.env.WRLS_CRON_LICENCES || '0 4 * * 1,2,3,4,5', // Note: these 2 flags need to be set to false for charging go-live // to suspend the import of invoice accounts and licence agreements // Update: I've changed those values to false ahead of the v2.0 charging @@ -102,28 +90,10 @@ module.exports = { // This will supersede the implementation here where the billing contact history // was calculated from NALD data isBillingDocumentRoleImportEnabled: false - }, - modLogs: { - schedule: process.env.WRLS_CRON_MOD_LOGS || '0 7 * * 1,2,3,4,5' - }, - returnVersions: { - schedule: process.env.WRLS_CRON_RETURN_VERSIONS || '0 6 * * 1,2,3,4,5' - }, - tracker: { - schedule: process.env.WRLS_CRON_TRACKER || '0 10 * * 1,2,3,4,5' } }, - - redis: { - host: process.env.REDIS_HOST || '127.0.0.1', - port: process.env.REDIS_PORT || 6379, - password: process.env.REDIS_PASSWORD || '', - ...(isTlsConnection) && { tls: {} }, - db: 0 - }, notify: { - templates: { - service_status_alert: 'c34d1b16-694b-4364-8e7e-83e9dbd34a62' - } + mailbox: process.env.WATER_SERVICE_MAILBOX, + templateId: 'c34d1b16-694b-4364-8e7e-83e9dbd34a62' } } diff --git a/index.js b/index.js index 5acb3779..e3ae7b25 100644 --- a/index.js +++ b/index.js @@ -4,7 +4,6 @@ require('dotenv').config() // -------------- Require vendor code ----------------- const Blipp = require('blipp') -const HapiAuthJwt2 = require('hapi-auth-jwt2') const moment = require('moment') moment.locale('en-gb') @@ -23,26 +22,9 @@ const plugins = [ { plugin: Blipp, options: config.blipp - }, - HapiAuthJwt2, - require('./src/plugins/pg-boss.plugin'), - require('./src/modules/licence-import/plugin'), - require('./src/modules/charging-import/plugin'), - require('./src/modules/mod-logs/plugin'), - require('./src/modules/return-versions/plugin.js'), - require('./src/modules/nald-import/plugin'), - require('./src/modules/bill-runs-import/plugin'), - require('./src/modules/core/plugin') + } ] -const configureServerAuthStrategy = (server) => { - server.auth.strategy('jwt', 'jwt', { - ...config.jwt, - validate: async (decoded) => ({ isValid: !!decoded.id }) - }) - server.auth.default('jwt') -} - const start = async function () { await server.register(plugins) @@ -54,8 +36,6 @@ const start = async function () { server.validator(require('@hapi/joi')) - configureServerAuthStrategy(server) - server.route(routes) if (!module.parent) { @@ -83,7 +63,6 @@ process // If there are no in-flight requests Hapi will immediately stop. 
If there are they get 25 seconds to finish // before Hapi terminates them await server.stop(options) - await server.messageQueue.stop() // Log we're shut down using the same log format as the rest of our log output server.logger.info("That's all folks!") diff --git a/package-lock.json b/package-lock.json index 9000d9b2..6a5e0fae 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,7 +13,6 @@ "@envage/hapi-pg-rest-api": "^4.1.2", "@envage/water-abstraction-helpers": "^4.9.0", "@hapi/boom": "^9.1.0", - "@hapi/catbox-redis": "^5.0.5", "@hapi/hapi": "^21.3.10", "@hapi/hoek": "^10.0.1", "@hapi/joi": "^15.1.1", @@ -26,13 +25,10 @@ "deep-map": "^2.0.0", "dotenv": "^8.2.0", "firstline": "^2.0.2", - "hapi-auth-jwt2": "^8.8.1", "hapi-pino": "^11.0.1", "mkdir": "0.0.2", "moment": "^2.30.1", - "node-cron": "^2.0.3", "pg": "^8.12.0", - "pg-boss": "^3.2.2", "proxy-agent": "^3.1.1", "request": "^2.88.2", "request-promise-native": "^1.0.9", @@ -693,35 +689,6 @@ "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-11.0.4.tgz", "integrity": "sha512-PnsP5d4q7289pS2T2EgGz147BFJ2Jpb4yrEdkpz2IhgEUzos1S7HTl7ezWh1yfYzYlj89KzLdCRkqsP6SIryeQ==" }, - "node_modules/@hapi/catbox-redis": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/@hapi/catbox-redis/-/catbox-redis-5.0.5.tgz", - "integrity": "sha512-S1O9yfnuGQYYB6EVXYcmYlXnyN6GJ9jICUvm0nCp5DA3dw0PHbdPN9zmtJzltOnZkhYF5Lzcn+12MA4Ot5N7bw==", - "dependencies": { - "@hapi/hoek": "8.x.x", - "@hapi/joi": "16.x.x", - "ioredis": "4.x.x" - } - }, - "node_modules/@hapi/catbox-redis/node_modules/@hapi/hoek": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", - "integrity": "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==", - "deprecated": "This version has been deprecated and is no longer supported or maintained" - }, - "node_modules/@hapi/catbox-redis/node_modules/@hapi/joi": { - "version": "16.1.8", - "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-16.1.8.tgz", - "integrity": "sha512-wAsVvTPe+FwSrsAurNt5vkg3zo+TblvC5Bb1zMVK6SJzZqw9UrJnexxR+76cpePmtUZKHAPxcQ2Bf7oVHyahhg==", - "deprecated": "Switch to 'npm install joi'", - "dependencies": { - "@hapi/address": "^2.1.2", - "@hapi/formula": "^1.2.0", - "@hapi/hoek": "^8.2.4", - "@hapi/pinpoint": "^1.0.2", - "@hapi/topo": "^3.1.3" - } - }, "node_modules/@hapi/catbox/node_modules/@hapi/boom": { "version": "10.0.1", "resolved": "https://registry.npmjs.org/@hapi/boom/-/boom-10.0.1.tgz", @@ -834,12 +801,6 @@ "resolved": "https://registry.npmjs.org/@hapi/file/-/file-3.0.0.tgz", "integrity": "sha512-w+lKW+yRrLhJu620jT3y+5g2mHqnKfepreykvdOcl9/6up8GrQQn+l3FRTsjHTKbkbfQFkuksHpdv2EcpKcJ4Q==" }, - "node_modules/@hapi/formula": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@hapi/formula/-/formula-1.2.0.tgz", - "integrity": "sha512-UFbtbGPjstz0eWHb+ga/GM3Z9EzqKXFWIbSOFURU0A/Gku0Bky4bCk9/h//K2Xr3IrCfjFNhMm4jyZ5dbCewGA==", - "deprecated": "Moved to 'npm install @sideway/formula'" - }, "node_modules/@hapi/hapi": { "version": "21.3.10", "resolved": "https://registry.npmjs.org/@hapi/hapi/-/hapi-21.3.10.tgz", @@ -1080,12 +1041,6 @@ "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-11.0.2.tgz", "integrity": "sha512-aKmlCO57XFZ26wso4rJsW4oTUnrgTFw2jh3io7CAtO9w4UltBNwRXvXIVzzyfkaaLRo3nluP/19msA8vDUUuKw==" }, - "node_modules/@hapi/pinpoint": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@hapi/pinpoint/-/pinpoint-1.0.2.tgz", - "integrity": 
"sha512-dtXC/WkZBfC5vxscazuiJ6iq4j9oNx1SHknmIr8hofarpKUZKmlUVYVIhNVzIEgK5Wrc4GMHL5lZtt1uS2flmQ==", - "deprecated": "Moved to 'npm install @sideway/pinpoint'" - }, "node_modules/@hapi/podium": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@hapi/podium/-/podium-5.0.1.tgz", @@ -2184,25 +2139,12 @@ "isarray": "^1.0.0" } }, - "node_modules/buffer-equal-constant-time": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" - }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, - "node_modules/buffer-writer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", - "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", - "engines": { - "node": ">=4" - } - }, "node_modules/builtins": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", @@ -2379,14 +2321,6 @@ "node": ">=0.8" } }, - "node_modules/cluster-key-slot": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", - "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -2442,14 +2376,6 @@ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true }, - "node_modules/cookie": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz", - "integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==", - "engines": { - "node": ">= 0.6" - } - }, "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", @@ -2754,14 +2680,6 @@ "node": ">=0.4.0" } }, - "node_modules/denque": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", - "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", - "engines": { - "node": ">=0.10" - } - }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -2831,14 +2749,6 @@ "safer-buffer": "^2.1.0" } }, - "node_modules/ecdsa-sig-formatter": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", - "dependencies": { - "safe-buffer": "^5.0.1" - } - }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -4032,34 +3942,6 @@ "node": ">=0.10.0" } }, - "node_modules/hapi-auth-jwt2": { - "version": "8.8.1", - "resolved": "https://registry.npmjs.org/hapi-auth-jwt2/-/hapi-auth-jwt2-8.8.1.tgz", - "integrity": "sha512-+Jw5OaUZZGBCDxb3f/ax3WnZvqDzQFGQrz6/AuctywaIlW+ow+NH+Gw6ik/37OZGlYpxO/GNsfiyuDmlUQf9Tg==", 
- "dependencies": { - "@hapi/boom": "^8.0.1", - "cookie": "^0.4.0", - "jsonwebtoken": "^8.5.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/hapi-auth-jwt2/node_modules/@hapi/boom": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@hapi/boom/-/boom-8.0.1.tgz", - "integrity": "sha512-SnBM2GzEYEA6AGFKXBqNLWXR3uNBui0bkmklYXX1gYtevVhDTy2uakwkSauxvIWMtlANGRhzChYg95If3FWCwA==", - "deprecated": "This version has been deprecated and is no longer supported or maintained", - "dependencies": { - "@hapi/hoek": "8.x.x" - } - }, - "node_modules/hapi-auth-jwt2/node_modules/@hapi/hoek": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", - "integrity": "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==", - "deprecated": "This version has been deprecated and is no longer supported or maintained" - }, "node_modules/hapi-pino": { "version": "11.0.1", "resolved": "https://registry.npmjs.org/hapi-pino/-/hapi-pino-11.0.1.tgz", @@ -4489,31 +4371,6 @@ "node": ">= 0.4" } }, - "node_modules/ioredis": { - "version": "4.28.5", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.28.5.tgz", - "integrity": "sha512-3GYo0GJtLqgNXj4YhrisLaNNvWSNwSS2wS4OELGfGxH8I69+XfNdnmV1AyN+ZqMh0i7eX+SWjrwFKDBDgfBC1A==", - "dependencies": { - "cluster-key-slot": "^1.1.0", - "debug": "^4.3.1", - "denque": "^1.1.0", - "lodash.defaults": "^4.2.0", - "lodash.flatten": "^4.4.0", - "lodash.isarguments": "^3.1.0", - "p-map": "^2.1.0", - "redis-commands": "1.7.0", - "redis-errors": "^1.2.0", - "redis-parser": "^3.0.0", - "standard-as-callback": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/ioredis" - } - }, "node_modules/ip": { "version": "1.1.8", "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz", @@ -4942,35 +4799,6 @@ "json5": "lib/cli.js" } }, - "node_modules/jsonwebtoken": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", - "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==", - "dependencies": { - "jws": "^3.2.2", - "lodash.includes": "^4.3.0", - "lodash.isboolean": "^3.0.3", - "lodash.isinteger": "^4.0.4", - "lodash.isnumber": "^3.0.3", - "lodash.isplainobject": "^4.0.6", - "lodash.isstring": "^4.0.1", - "lodash.once": "^4.0.0", - "ms": "^2.1.1", - "semver": "^5.6.0" - }, - "engines": { - "node": ">=4", - "npm": ">=1.4.28" - } - }, - "node_modules/jsonwebtoken/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "bin": { - "semver": "bin/semver" - } - }, "node_modules/jsprim": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", @@ -5004,25 +4832,6 @@ "integrity": "sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==", "dev": true }, - "node_modules/jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "dependencies": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/jws": { - "version": "3.2.2", - "resolved": 
"https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "dependencies": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, "node_modules/levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -5083,63 +4892,18 @@ "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==" }, - "node_modules/lodash.flatten": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==" - }, "node_modules/lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", "dev": true }, - "node_modules/lodash.includes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", - "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" - }, - "node_modules/lodash.isarguments": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", - "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==" - }, - "node_modules/lodash.isboolean": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", - "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" - }, - "node_modules/lodash.isinteger": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", - "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" - }, - "node_modules/lodash.isnumber": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", - "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" - }, - "node_modules/lodash.isplainobject": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" - }, - "node_modules/lodash.isstring": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" - }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" - }, "node_modules/loose-envify": { "version": "1.4.0", "resolved": 
"https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", @@ -5368,19 +5132,6 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, - "node_modules/node-cron": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-2.0.3.tgz", - "integrity": "sha512-eJI+QitXlwcgiZwNNSRbqsjeZMp5shyajMR81RZCqeW0ZDEj4zU9tpd4nTh/1JsBiKbF8d08FCewiipDmVIYjg==", - "hasInstallScript": true, - "dependencies": { - "opencollective-postinstall": "^2.0.0", - "tz-offset": "0.0.1" - }, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/node-fetch": { "version": "2.6.7", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", @@ -5559,14 +5310,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/opencollective-postinstall": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/opencollective-postinstall/-/opencollective-postinstall-2.0.3.tgz", - "integrity": "sha512-8AV/sCtuzUeTo8gQK5qDZzARrulB3egtLzFgteqB2tcT4Mw7B8Kt7JcDHmltjz6FOAHsvTevk70gZEbhM4ZS9Q==", - "bin": { - "opencollective-postinstall": "index.js" - } - }, "node_modules/optionator": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", @@ -5618,14 +5361,6 @@ "node": ">=6" } }, - "node_modules/p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", - "engines": { - "node": ">=6" - } - }, "node_modules/p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", @@ -5661,11 +5396,6 @@ "thunkify": "^2.1.2" } }, - "node_modules/packet-reader": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", - "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" - }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -5789,68 +5519,6 @@ } } }, - "node_modules/pg-boss": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/pg-boss/-/pg-boss-3.2.2.tgz", - "integrity": "sha512-UL/F7coW1gtXNb/WzwqQ3Bv1wMsSH2oSpeAB9zCJ4RHZXZ0f6JNDMcUZPeXIyZdEIq3GjcqwYtJy4F/Z45pufQ==", - "dependencies": { - "bluebird": "^3.5.2", - "pg": "^7.12.0", - "uuid": "^3.2.1" - }, - "engines": { - "node": ">=8.0.0", - "npm": ">=5.0.0" - } - }, - "node_modules/pg-boss/node_modules/pg": { - "version": "7.18.2", - "resolved": "https://registry.npmjs.org/pg/-/pg-7.18.2.tgz", - "integrity": "sha512-Mvt0dGYMwvEADNKy5PMQGlzPudKcKKzJds/VbOeZJpb6f/pI3mmoXX0JksPgI3l3JPP/2Apq7F36O63J7mgveA==", - "dependencies": { - "buffer-writer": "2.0.0", - "packet-reader": "1.0.0", - "pg-connection-string": "0.1.3", - "pg-packet-stream": "^1.1.0", - "pg-pool": "^2.0.10", - "pg-types": "^2.1.0", - "pgpass": "1.x", - "semver": "4.3.2" - }, - "engines": { - "node": ">= 4.5.0" - } - }, - "node_modules/pg-boss/node_modules/pg-connection-string": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz", - "integrity": "sha512-i0NV/CrSkFTaiOQs9AGy3tq0dkSjtTd4d7DfsjeDVZAA4aIHInwfFEmriNYGGJUfZ5x6IAC/QddoUpUJjQAi0w==" - }, - "node_modules/pg-boss/node_modules/pg-pool": { - "version": "2.0.10", - "resolved": 
"https://registry.npmjs.org/pg-pool/-/pg-pool-2.0.10.tgz", - "integrity": "sha512-qdwzY92bHf3nwzIUcj+zJ0Qo5lpG/YxchahxIN8+ZVmXqkahKXsnl2aiJPHLYN9o5mB/leG+Xh6XKxtP7e0sjg==", - "peerDependencies": { - "pg": ">5.0" - } - }, - "node_modules/pg-boss/node_modules/semver": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-4.3.2.tgz", - "integrity": "sha512-VyFUffiBx8hABJ9HYSTXLRwyZtdDHMzMtFmID1aiNAD2BZppBmJm0Hqw3p2jkgxP9BNt1pQ9RnC49P0EcXf6cA==", - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/pg-boss/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/pg-cloudflare": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz", @@ -5870,11 +5538,6 @@ "node": ">=4.0.0" } }, - "node_modules/pg-packet-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/pg-packet-stream/-/pg-packet-stream-1.1.0.tgz", - "integrity": "sha512-kRBH0tDIW/8lfnnOyTwKD23ygJ/kexQVXZs7gEyBljw4FYqimZFxnMMx50ndZ8In77QgfGuItS5LLclC2TtjYg==" - }, "node_modules/pg-pool": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.2.tgz", @@ -6406,30 +6069,6 @@ "node": ">= 12.13.0" } }, - "node_modules/redis-commands": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz", - "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==" - }, - "node_modules/redis-errors": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", - "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==", - "engines": { - "node": ">=4" - } - }, - "node_modules/redis-parser": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", - "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", - "dependencies": { - "redis-errors": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/regexp.prototype.flags": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", @@ -7031,11 +6670,6 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/standard-as-callback": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", - "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==" - }, "node_modules/standard-engine": { "version": "15.0.0", "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-15.0.0.tgz", @@ -8153,11 +7787,6 @@ "node": ">=4.2.0" } }, - "node_modules/tz-offset": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/tz-offset/-/tz-offset-0.0.1.tgz", - "integrity": "sha512-kMBmblijHJXyOpKzgDhKx9INYU4u4E1RPMB0HqmKSgWG8vEcf3exEfLh4FFfzd3xdQOw9EuIy/cP0akY6rHopQ==" - }, "node_modules/uglify-js": { "version": "3.17.0", "resolved": 
"https://registry.npmjs.org/uglify-js/-/uglify-js-3.17.0.tgz", diff --git a/package.json b/package.json index 1c50ca90..abecdb90 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,6 @@ "@envage/hapi-pg-rest-api": "^4.1.2", "@envage/water-abstraction-helpers": "^4.9.0", "@hapi/boom": "^9.1.0", - "@hapi/catbox-redis": "^5.0.5", "@hapi/hapi": "^21.3.10", "@hapi/hoek": "^10.0.1", "@hapi/joi": "^15.1.1", @@ -37,13 +36,10 @@ "deep-map": "^2.0.0", "dotenv": "^8.2.0", "firstline": "^2.0.2", - "hapi-auth-jwt2": "^8.8.1", "hapi-pino": "^11.0.1", "mkdir": "0.0.2", "moment": "^2.30.1", - "node-cron": "^2.0.3", "pg": "^8.12.0", - "pg-boss": "^3.2.2", "proxy-agent": "^3.1.1", "request": "^2.88.2", "request-promise-native": "^1.0.9", diff --git a/server.js b/server.js index a0f4557d..87160256 100644 --- a/server.js +++ b/server.js @@ -3,20 +3,11 @@ require('dotenv').config() const Hapi = require('@hapi/hapi') -const CatboxRedis = require('@hapi/catbox-redis') const config = require('./config') const server = Hapi.server({ - ...config.server, - cache: [ - { - provider: { - constructor: CatboxRedis, - options: config.redis - } - } - ] + ...config.server }) module.exports = server diff --git a/src/lib/connectors/db.js b/src/lib/connectors/db.js index f0fbd72f..ede909bd 100644 --- a/src/lib/connectors/db.js +++ b/src/lib/connectors/db.js @@ -15,6 +15,17 @@ pg.types.setTypeParser(pg.types.builtins.DATE, dateMapper) const pool = helpers.db.createPool(config.pg, logger) +async function query (query, params = []) { + const { error, rows } = await pool.query(query, params) + + if (error) { + throw error + } + + return rows +} + module.exports = { - pool + pool, + query } diff --git a/src/lib/connectors/import.js b/src/lib/connectors/import.js deleted file mode 100644 index 14cb3a9b..00000000 --- a/src/lib/connectors/import.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -const { pool } = require('./db') - -const getLicenceNumbersQuery = ` - select "LIC_NO" - from import."NALD_ABS_LICENCES"; -` - -const deleteCrmV1DocumentsQuery = ` - update crm.document_header - set date_deleted = now() - where system_external_id not in ( - select l."LIC_NO" - from import."NALD_ABS_LICENCES" l - ) - and date_deleted is null - and regime_entity_id = '0434dc31-a34e-7158-5775-4694af7a60cf'; -` - -const getLicenceNumbers = async () => { - const response = await pool.query(getLicenceNumbersQuery) - return response.rows || [] -} - -const deleteRemovedDocuments = () => { - return pool.query(deleteCrmV1DocumentsQuery) -} - -module.exports = { - getLicenceNumbers, - deleteRemovedDocuments -} diff --git a/src/lib/connectors/water-import/jobs.js b/src/lib/connectors/water-import/jobs.js deleted file mode 100644 index 09587f5e..00000000 --- a/src/lib/connectors/water-import/jobs.js +++ /dev/null @@ -1,65 +0,0 @@ -'use strict' - -const { pool } = require('../db') -const { pgBossJobOverview, pgBossFailedJobs } = require('./queries') -const moment = require('moment') - -const getJobSummary = () => { - /** - * The purpose of this route is to check the state of import jobs. - * - * The checks are intentionally broken into separate queries. This is because the import - * microservice is currently powered by PGBoss while other parts of The Service are powered - * by BullMQ. The assumption/anticipation is that at some point, import jobs will coexist across - * multiple job handlers... Maybe. 
- */ - const pgBossJobsArray = [ - { id: 'nald-import.import-licence', displayName: 'Licences (NALD)' }, - { id: 'import.licences', displayName: 'Licences' }, - { id: 'import.licence', displayName: 'Licence' }, - { id: 'import.delete-documents', displayName: 'Delete removed documents' }, - { id: 'nald-import.delete-removed-documents', displayName: 'Delete removed documents (NALD)' }, - { id: 'nald-import.s3-download', displayName: 'NALD Zip Download' }, - { id: 'nald-import.populate-pending-import', displayName: 'Populate pending import' }, - { id: 'import.bill-runs', displayName: 'Bill runs' }, - { id: 'import.charging-data', displayName: 'Charging data' }, - { id: 'import.companies', displayName: 'Companies' }, - { id: 'import.company', displayName: 'Company' } - ] - - return Promise.all(pgBossJobsArray.map(async eachJob => { - const { rows: status } = await pool.query(pgBossJobOverview, [eachJob.id]) - - const failedRow = status.find(row => row.state === 'failed') - const failedCount = parseInt(failedRow?.count ?? 0) - const completedRow = status.find(row => row.state === 'completed') - const completedCount = parseInt(completedRow?.count ?? 0) - const isActive = !!status.find(row => row.state === 'active') || !!status.find(row => row.state === 'created') - const lastUpdated = completedRow?.max_completed_date ?? null - - return { - displayName: eachJob.displayName, - failedCount, - completedCount, - isActive, - lastUpdated - } - })) -} - -const getFailedJobs = async () => { - const { rows } = await pool.query(pgBossFailedJobs) - return rows.map(row => { - return { - jobName: row.name, - total: row.count, - dateCreated: row.max_created_date ? moment(row.max_created_date).format('DD MMM YYYY HH:mm:ss') : '', - dateCompleted: row.max_completed_date ? 
moment(row.max_completed_date).format('DD MMM YYYY HH:mm:ss') : '' - } - }) -} - -module.exports = { - getFailedJobs, - getJobSummary -} diff --git a/src/lib/connectors/water-import/queries.js b/src/lib/connectors/water-import/queries.js deleted file mode 100644 index 26008127..00000000 --- a/src/lib/connectors/water-import/queries.js +++ /dev/null @@ -1,54 +0,0 @@ -const pgBossJobOverview = `select state, sum(count) as count, max(max_completed_date) as max_completed_date from (select - state, - COUNT(*), - max(completedon) as max_completed_date -from - water_import.job j -where - j.state in ('failed', 'completed', 'active', 'created') - and (j.createdon > now() - interval '3 days' or j.completedon > now() - interval '3 days') - and j.name = $1 - group by j.state -union all -select - state, - COUNT(*), - max(completedon) as max_completed_date -from - water_import.archive a -where - a.state in ('failed', 'completed', 'active', 'created') - and (a.createdon > now() - interval '3 days' or a.completedon > now() - interval '3 days') - and a.name = $1 - group by a.state) cte - group by state` - -const pgBossFailedJobs = `select name, sum(count) as count, max(max_completed_date) as max_completed_date, max(max_created_date) as max_created_date from (select - name, - COUNT(*), - max(completedon) as max_completed_date, - max(createdon) as max_created_date - from - water_import.job j - where - j.state = 'failed' - and (j.createdon > now() - interval '12 hours' or j.completedon > now() - interval '12 hours') - group by j.name - union all - select - name, - COUNT(*), - max(completedon) as max_completed_date, - max(createdon) as max_created_date - from - water_import.archive a - where - a.state = 'failed' - and (a.createdon > now() - interval '12 hours' or a.completedon > now() - interval '12 hours') - group by a.name) cte - group by name` - -module.exports = { - pgBossFailedJobs, - pgBossJobOverview -} diff --git a/src/lib/connectors/water/application-state.js b/src/lib/connectors/water/application-state.js deleted file mode 100644 index 66502fba..00000000 --- a/src/lib/connectors/water/application-state.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' - -const { serviceRequest } = require('@envage/water-abstraction-helpers') -const urlJoin = require('url-join') - -const config = require('../../../../config') - -const getUrl = key => { - return urlJoin(config.services.water, 'application-state', key) -} - -const getState = key => serviceRequest.get(getUrl(key)) - -const postState = (key, data) => { - const url = getUrl(key) - return serviceRequest.post(url, { - body: data - }) -} - -module.exports = { - getState, - postState -} diff --git a/src/lib/date-helpers.js b/src/lib/date-helpers.js index 362bd272..db61545f 100644 --- a/src/lib/date-helpers.js +++ b/src/lib/date-helpers.js @@ -2,35 +2,80 @@ const moment = require('moment') +const DATE_FORMAT = 'YYYY-MM-DD' +const NALD_FORMAT = 'DD/MM/YYYY' +const NALD_TRANSFER_FORMAT = 'DD/MM/YYYY HH:mm:ss' + /** - * Given an array of dates which can be parsed by Moment, - * filters out falsey values and returns a list of moment objects - * sorted in ascending date order - * @param {Array} arr - * @return {Array} + * Gets the end date for a company address from licence version data + * @param {Object} row - from NALD licence/licence version data + * @param {String,Null} currentEnd - the current value of the end date in the accumulator */ -const getSortedDates = arr => { - const filteredArray = arr.filter(value => value) - const mappedArray = 
filteredArray.map(value => moment(value)) - mappedArray.sort(function (startDate1, startDate2) { - if ((startDate1.unix > startDate2.unix)) { - return -1 - } else { - return 1 - } - }) - return mappedArray +function getEndDate (row, currentEnd) { + // Get all end dates for this row + const endDates = [row.EFF_END_DATE, row.EXPIRY_DATE, row.REV_DATE, row.LAPSED_DATE] + .map(mapNaldDate) + .filter(value => value) + + const arr = [getMinDate(endDates), currentEnd] + + return arr.includes(null) ? null : getMaxDate(arr) +} + +function getMaxDate (values) { + const sorted = _sortDates(values) + + return sorted.length === 0 ? null : sorted[sorted.length - 1].format(DATE_FORMAT) +} + +function getMinDate (values) { + const sorted = _sortDates(values) + + return sorted.length === 0 ? null : sorted[0].format(DATE_FORMAT) +} + +function getPreviousDay (value) { + return moment(value, DATE_FORMAT).subtract(1, 'day').format(DATE_FORMAT) +} + +function mapIsoDateToNald (value) { + if (value === null) { + return 'null' + } + + return moment(value, DATE_FORMAT).format(NALD_FORMAT) } -const getMinDate = arr => { - return getSortedDates(arr)[0] +function mapNaldDate (value) { + if (value === 'null') { + return null + } + + return moment(value, NALD_FORMAT).format(DATE_FORMAT) } -const getMaxDate = arr => { - const sorted = getSortedDates(arr) - return sorted[sorted.length - 1] + +function mapTransferDate (value) { + return moment(value, NALD_TRANSFER_FORMAT).format(DATE_FORMAT) +} + +function _sortDates (arr) { + const moments = arr + .map(value => moment(value, DATE_FORMAT)) + .filter(m => m.isValid()) + + const sorted = moments.sort(function (startDate1, startDate2) { + return startDate1 - startDate2 + }) + + return sorted } module.exports = { + getEndDate, getMinDate, - getMaxDate + getMaxDate, + getPreviousDay, + mapIsoDateToNald, + mapNaldDate, + mapTransferDate } diff --git a/src/lib/general.js b/src/lib/general.js new file mode 100644 index 00000000..9914eb1d --- /dev/null +++ b/src/lib/general.js @@ -0,0 +1,94 @@ +'use strict' + +const { randomUUID } = require('crypto') + +/** + * Calculates and logs the time taken in milliseconds between the provided `startTime` and the current time + * + * We often want to see how long a process takes and capture it in our logs. This can be especially useful when we + * have a process that involves talking to an external one. By capturing the time it takes our process to complete + * we can deal with any challenges about the performance of our process VS the total time taken. + * + * To do that you need to record the time when the process starts and the time when the process ends and then work out + * the duration. Doing that with JavaScript time constructs though gets very messy and we want to avoid bringing in + * 3rd party packages for just this one thing. + * + * Unfortunately, we cannot find the original source but a 'neat' way of doing it is to use + * {@link https://nodejs.org/api/process.html#processhrtimebigint | process.hrtime.bigint()} which returns + * "the current high-resolution real time in nanoseconds". + * + * Assuming a process recorded the start time using `currentTimeInNanoseconds()` when passed to this helper it will + * work out the time taken in nanoseconds, convert that to milliseconds and seconds and output it as a log message. 
+ * + * @param {bigint} startTime - the time the process started in nanoseconds + * @param {string} message - the message to log + * @param {object} [data] - additional data to include with the log output + * + * @returns {object} the log data generated and logged + */ +function calculateAndLogTimeTaken (startTime, message, data = {}) { + const endTime = currentTimeInNanoseconds() + const timeTakenNs = endTime - startTime + const timeTakenMs = timeTakenNs / 1000000n + const timeTakenSs = timeTakenMs / 1000n + + const logData = { + timeTakenMs, + timeTakenSs, + ...data + } + + global.GlobalNotifier.omg(message, logData) + + return logData +} + +/** + * Returns the current time in nanoseconds. Used as part of logging how long something takes + * + * We often want to see how long a process takes and capture it in our logs. This can be especially useful when we + * have a process that involves talking to an external one. By capturing the time it takes our process to complete + * we can deal with any challenges about the performance of our process VS the total time taken. + * + * To do that you need to record the time when the process starts and the time when the process ends and then work out + * the duration. Doing that with JavaScript time constructs though gets very messy and we want to avoid bringing in + * 3rd party packages for just this one thing. + * + * Unfortunately, we cannot find the original source but a 'neat' way of doing it is to use + * {@link https://nodejs.org/api/process.html#processhrtimebigint | process.hrtime.bigint()} which returns + * "the current high-resolution real time in nanoseconds". + * + * Do the same at the end and take one from the other, and you then have the duration in nanoseconds which you can + * easily convert into something more readable. + * + * @returns {bigint} the current time in nanoseconds + */ +function currentTimeInNanoseconds () { + return process.hrtime.bigint() +} + +/** + * Generate a Universally Unique Identifier (UUID) + * + * The service uses these as the IDs for most records in the DB. Most tables will automatically generate them when + * the record is created but not all do. There are also times when it is either more performant, simpler, or both for + * us to generate the ID before inserting a new record. For example, we can pass the generated ID to child records to + * set the foreign key relationship. + * + * NOTE: We set `disableEntropyCache` to `false` as normally, for performance reasons node caches enough random data to + * generate up to 128 UUIDs. We disable this as we may need to generate more than this and the performance hit in + * disabling this cache is a rounding error in comparison to the rest of the process. + * + * https://nodejs.org/api/crypto.html#cryptorandomuuidoptions + * + * @returns {string} a randomly generated UUID + */ +function generateUUID () { + return randomUUID({ disableEntropyCache: true }) +} + +module.exports = { + calculateAndLogTimeTaken, + currentTimeInNanoseconds, + generateUUID +} diff --git a/src/lib/notifiers/base-notifier.lib.js b/src/lib/notifiers/base-notifier.lib.js index 6d233315..47434fb4 100644 --- a/src/lib/notifiers/base-notifier.lib.js +++ b/src/lib/notifiers/base-notifier.lib.js @@ -108,6 +108,19 @@ class BaseNotifierLib { }) } + /** + * Use to add an error message to the log but not send an Airbrake notification + * + * The message will be added as an `ERROR` level log message. 
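The timing helpers added in `src/lib/general.js` above are meant to be used as a pair: record a start time with `currentTimeInNanoseconds()`, do the work, then pass that start time to `calculateAndLogTimeTaken()`, which logs the duration via `global.GlobalNotifier.omg()`. A small sketch of the pattern, assuming `global.GlobalNotifier` has been registered as it is when this service boots; the step name, delay and extra data are illustrative only:

'use strict'

const { setTimeout } = require('timers/promises')

const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('./src/lib/general.js')

// Illustrative only: wrap a long-running piece of work in the new timing helpers
async function timedExample () {
  const startTime = currentTimeInNanoseconds()

  await setTimeout(500) // stand-in for the real work being measured

  // Logs 'example-step complete' with timeTakenMs and timeTakenSs included in the log data
  return calculateAndLogTimeTaken(startTime, 'example-step complete', { count: 42 })
}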
+ * + * @param {string} message Message to add to the log (ERROR) + * @param {Object} [data={}] An object containing any values to be logged, for example, a bill run ID to be included with + * the log message. Defaults to an empty object + */ + oops (message, data = {}) { + this._logger.error(this._formatLogPacket(data), message) + } + /** * Flush any outstanding Airbrake notifications * diff --git a/src/lib/services/application-state-service.js b/src/lib/services/application-state-service.js deleted file mode 100644 index abfd21a3..00000000 --- a/src/lib/services/application-state-service.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -const applicationStateConnector = require('../connectors/water/application-state') -const constants = require('../../modules/nald-import/lib/constants') - -const get = async identifier => { - const state = await applicationStateConnector.getState(identifier) - return state.data -} - -const save = (key = constants.APPLICATION_STATE_KEY, data = {}) => applicationStateConnector.postState(key, data) - -module.exports = { - get, - save -} diff --git a/src/lib/services/import.js b/src/lib/services/import.js deleted file mode 100644 index c974d9fc..00000000 --- a/src/lib/services/import.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -const importConnector = require('../connectors/import') - -const getLicenceNumbers = async () => { - const licenceNumbers = await importConnector.getLicenceNumbers() - return licenceNumbers.map(licenceNumber => licenceNumber.LIC_NO) -} - -const deleteRemovedDocuments = async () => { - return importConnector.deleteRemovedDocuments() -} - -module.exports = { - deleteRemovedDocuments, - getLicenceNumbers -} diff --git a/src/lib/services/notify.js b/src/lib/services/notify.js deleted file mode 100644 index ce98b281..00000000 --- a/src/lib/services/notify.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' - -const notifyConnector = require('../connectors/water/notify') -const config = require('../../../config') - -/** - * Gets the notify template ID to use by inspecting the application config - * @param {Object} scheduledNotification - row from scheduled_notification table - * @return {String} - Notify template ID - */ -const getNotifyTemplate = messageRef => config.notify.templates[`${messageRef}`] -/** - * Sends an email via Water Service to Notify API - * @param {Object} client - Notify client - * @param {Object} messageRef - messageRef stored in config - * @return {Promise} - resolves when message sent - */ -const sendEmail = async (recipient, messageRef, personalisation) => { - const templateId = getNotifyTemplate(messageRef) - return notifyConnector.postSendNotify('email', { templateId, recipient, personalisation }) -} - -module.exports = { - sendEmail -} diff --git a/src/modules/bill-runs-import/controller.js b/src/modules/bill-runs-import/controller.js deleted file mode 100644 index aacad1a8..00000000 --- a/src/modules/bill-runs-import/controller.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const constants = require('./lib/constants') - -const postImportBillRuns = async request => { - await request.messageQueue.publish(constants.IMPORT_BILL_RUNS) - return { - error: null - } -} - -module.exports = { - postImportBillRuns -} diff --git a/src/modules/bill-runs-import/lib/constants.js b/src/modules/bill-runs-import/lib/constants.js deleted file mode 100644 index 11c9eadd..00000000 --- a/src/modules/bill-runs-import/lib/constants.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict' - -module.exports = { - IMPORT_BILL_RUNS: 
'import.bill-runs' -} diff --git a/src/modules/bill-runs-import/lib/import.js b/src/modules/bill-runs-import/lib/import.js deleted file mode 100644 index a14ce831..00000000 --- a/src/modules/bill-runs-import/lib/import.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict' - -const queries = require('./queries') -const { pool } = require('../../../lib/connectors/db') - -const createRow = (tableName, query) => ({ - tableName, - query -}) - -const importQueries = [ - createRow('remove_constraints', queries.removeConstraints), - createRow('billing_batches', queries.importNaldBillRuns), - createRow('billing_invoices', queries.importNaldBillHeaders), - createRow('billing_invoice_licences', queries.importInvoiceLicences), - createRow('billing_transactions', queries.importTransactions), - createRow('billing_resetSecondPartChargeFlag', queries.resetIsSecondPartChargeFlag), - createRow('billing_setSecondPartChargeFlag', queries.setIsSecondPartChargeFlag), - createRow('billing_volumes', queries.importBillingVolumes), - createRow('billing_batch_charge_version_years', queries.importBillingBatchChargeVersionYears), - createRow('add_constraints', queries.addConstraints) -] - -/** - * Run SQL queries to import bill runs to the water - * billing tables. - * It is envisaged this will only be run once in production - * - * @return {Promise} - */ -const importBillRuns = async () => { - try { - global.GlobalNotifier.omg('import.bill-runs: started') - - for (const { query } of importQueries) { - await pool.query(query) - } - - global.GlobalNotifier.omg('import.bill-runs: finished') - } catch (error) { - global.GlobalNotifier.omfg('import.bill-runs: errored', error) - throw error - } -} - -module.exports = { - importBillRuns -} diff --git a/src/modules/bill-runs-import/plugin.js b/src/modules/bill-runs-import/plugin.js deleted file mode 100644 index b20fea8b..00000000 --- a/src/modules/bill-runs-import/plugin.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict' - -const routes = require('./routes') -const importer = require('./lib/import') -const constants = require('./lib/constants') - -const plugin = { - name: 'importBillRunData', - dependencies: ['pgBoss'], - register: async server => { - // Register routes - server.route(routes) - - // Register PG boss job - await server.messageQueue.subscribe(constants.IMPORT_BILL_RUNS, {}, importer.importBillRuns) - } -} - -module.exports = { - plugin -} diff --git a/src/modules/bill-runs-import/routes.js b/src/modules/bill-runs-import/routes.js deleted file mode 100644 index d7137377..00000000 --- a/src/modules/bill-runs-import/routes.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const controller = require('./controller') - -module.exports = [ - { - method: 'post', - handler: controller.postImportBillRuns, - path: '/import/1.0/bill-runs' - } -] diff --git a/src/modules/bill-runs/controller.js b/src/modules/bill-runs/controller.js new file mode 100644 index 00000000..df09cbfc --- /dev/null +++ b/src/modules/bill-runs/controller.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function billRuns (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + billRuns +} diff --git a/src/modules/bill-runs-import/lib/queries.js b/src/modules/bill-runs/lib/queries.js similarity index 91% rename from src/modules/bill-runs-import/lib/queries.js rename to src/modules/bill-runs/lib/queries.js index 0c9aa7b2..4e778607 100644 --- a/src/modules/bill-runs-import/lib/queries.js +++ 
b/src/modules/bill-runs/lib/queries.js @@ -8,7 +8,7 @@ insert into water.billing_batches ( invoice_value, credit_note_value ) select r.region_id, -(case +(case when nbr."BILL_RUN_TYPE"='A' then 'annual' when nbr."BILL_RUN_TYPE"='S' then 'supplementary' when nbr."BILL_RUN_TYPE"='R' then 'two_part_tariff' @@ -21,8 +21,8 @@ nbr."FIN_YEAR"::integer as to_financial_year_ending, nullif(nbr."NO_OF_INVS", 'null')::integer as invoice_count, nullif(nbr."NO_OF_CRNS", 'null')::integer as credit_note_count, ( -(nullif(nbr."VALUE_OF_INVS", 'null')::numeric * 100) + -(nullif(nbr."VALUE_OF_CRNS", 'null')::numeric * 100) +(nullif(nbr."VALUE_OF_INVS", 'null')::numeric * 100) + +(nullif(nbr."VALUE_OF_CRNS", 'null')::numeric * 100) )::bigint as net_total, nbr."BILL_RUN_NO"::integer as bill_run_number, concat_ws(':', nbr."FGAC_REGION_CODE", nbr."BILL_RUN_NO") in ( @@ -90,7 +90,7 @@ nullif(nbr."VALUE_OF_INVS", 'null')::numeric * 100 as invoice_value, nullif(nbr."VALUE_OF_CRNS", 'null')::numeric * 100 as credit_note_value from import."NALD_BILL_RUNS" nbr join water.regions r on nbr."FGAC_REGION_CODE"::integer=r.nald_region_id -where +where nbr."BILL_RUN_TYPE" in ('A', 'S', 'R') and nbr."IAS_XFER_DATE"<>'null' and nbr."FIN_YEAR"::integer>=2015 @@ -103,7 +103,7 @@ insert into water.billing_invoices ( is_credit, date_created, date_updated, billing_batch_id, financial_year_ending, legacy_id, metadata, invoice_number, rebilling_state ) -select +select ia.invoice_account_id, '{}'::jsonb as address, nbh."IAS_CUST_REF" as invoice_account_number, @@ -127,35 +127,35 @@ const importInvoiceLicences = ` insert into water.billing_invoice_licences ( billing_invoice_id, licence_ref, date_created, date_updated, licence_id ) -select -i.billing_invoice_id, -nl."LIC_NO" as licence_ref, -i.date_created, -i.date_updated, +select +i.billing_invoice_id, +nl."LIC_NO" as licence_ref, +i.date_created, +i.date_updated, l.licence_id from import."NALD_BILL_HEADERS" nbh join water.billing_invoices i on concat_ws(':', nbh."FGAC_REGION_CODE", nbh."ID")=i.legacy_id -left join import."NALD_BILL_TRANS" nbt on nbh."FGAC_REGION_CODE"=nbt."FGAC_REGION_CODE" and nbh."ID"=nbt."ABHD_ID" +left join import."NALD_BILL_TRANS" nbt on nbh."FGAC_REGION_CODE"=nbt."FGAC_REGION_CODE" and nbh."ID"=nbt."ABHD_ID" join import."NALD_ABS_LICENCES" nl on nbt."FGAC_REGION_CODE"=nl."FGAC_REGION_CODE" and nbt."LIC_ID"=nl."ID" left join water.licences l on nl."LIC_NO"=l.licence_ref on conflict (billing_invoice_id, licence_id) do nothing; ` const resetIsSecondPartChargeFlag = ` -update water.billing_transactions +update water.billing_transactions set is_two_part_second_part_charge = false; ` const setIsSecondPartChargeFlag = ` -update water.billing_transactions +update water.billing_transactions set is_two_part_second_part_charge = true where description ilike 'second%'; ` const importTransactions = ` -insert into water.billing_transactions ( - billing_invoice_licence_id, - charge_element_id, - start_date, +insert into water.billing_transactions ( + billing_invoice_licence_id, + charge_element_id, + start_date, end_date, abstraction_period, source, @@ -190,7 +190,7 @@ insert into water.billing_transactions ( calc_eiuc_factor, calc_eiuc_source_factor ) -select +select il.billing_invoice_licence_id, ce.charge_element_id, to_date(nbt."BILL_ST_DATE", 'DD/MM/YYYY') as start_date, @@ -215,7 +215,7 @@ i.date_updated, nbt."BILLABLE_ANN_QTY"::numeric as volume, null as section_126_factor, nbt."ELEMENT_AGRMNTS"='S127' as section_127_agreement, -case +case when 
left(nbt."LH_ACC_AGRMNTS", 4)='S130' then nbt."LH_ACC_AGRMNTS" else null end as section_130_agreement, @@ -241,24 +241,24 @@ join water.billing_invoices i on concat_ws(':', nbt."FGAC_REGION_CODE", nbt."ABH join ( -- Get billing invoice licences with NALD licence ID select il.billing_invoice_licence_id, il.billing_invoice_id, nl."ID" - from water.billing_invoice_licences il + from water.billing_invoice_licences il join import."NALD_ABS_LICENCES" nl on il.licence_ref=nl."LIC_NO" ) il on i.billing_invoice_id=il.billing_invoice_id and nbt."LIC_ID"=il."ID" left join water.charge_elements ce on concat_ws(':', nbt."FGAC_REGION_CODE", nbt."ACEL_ID")=ce.external_id left join( -- Gets standard charges - select - nbt."ID", - nbt."FGAC_REGION_CODE", + select + nbt."ID", + nbt."FGAC_REGION_CODE", 'standard'::water.charge_type as charge_type, nbt."FINAL_A1_BILLABLE_AMOUNT"::numeric*100 as net_amount, concat_ws(':', nbt."FGAC_REGION_CODE", nbt."ID", 'S') as legacy_id, concat( -- Two-part tariff charge prefix - case - when nbt2.is_two_part_tariff and nbt2.is_two_part_second_part_charge + case + when nbt2.is_two_part_tariff and nbt2.is_two_part_second_part_charge then concat('Second part ', nbt2.purpose_use_descr, ' charge') - when nbt2.is_two_part_tariff and not nbt2.is_two_part_second_part_charge + when nbt2.is_two_part_tariff and not nbt2.is_two_part_second_part_charge then concat('First part ', nbt2.purpose_use_descr, ' charge') end, -- At {charge element description} suffix for all 2PT charges @@ -267,7 +267,7 @@ left join( then concat(' at ', nbt2.charge_element_descr) end, -- Non 2PT - charge element description defaulting to purpose use description - case + case when not nbt2.is_two_part_tariff then coalesce(nbt2.charge_element_descr, nbt2.purpose_use_descr) end @@ -279,7 +279,7 @@ left join( case when nbt."ELEMENT_AGRMNTS" LIKE '%127' THEN REPLACE(nbt."ELEMENT_AGRMNT_VALS", 'x ', '')::numeric ELSE null END as s127 from import."NALD_BILL_TRANS" nbt join ( - select + select nbt."FGAC_REGION_CODE", nbt."ID", nbr."BILL_RUN_TYPE"='R' as is_two_part_second_part_charge, @@ -292,9 +292,9 @@ left join( join import."NALD_PURP_USES" npu on nce."APUR_APUS_CODE"=npu."CODE" ) nbt2 on nbt."FGAC_REGION_CODE"=nbt2."FGAC_REGION_CODE" and nbt."ID"=nbt2."ID" -- Gets compensation charges - union select - nbt."ID", - nbt."FGAC_REGION_CODE", + union select + nbt."ID", + nbt."FGAC_REGION_CODE", 'compensation'::water.charge_type as charge_type, nbt."FINAL_A2_BILLABLE_AMOUNT"::numeric*100 as net_amount, concat_ws(':', nbt."FGAC_REGION_CODE", nbt."ID", 'C') as legacy_id, @@ -307,14 +307,14 @@ left join( from import."NALD_BILL_TRANS" nbt join ( -- Non-TPT bill runs - select nbr."FGAC_REGION_CODE", nbr."BILL_RUN_NO" - from import."NALD_BILL_RUNS" nbr + select nbr."FGAC_REGION_CODE", nbr."BILL_RUN_NO" + from import."NALD_BILL_RUNS" nbr where nbr."BILL_RUN_TYPE" in ('A', 'S') ) nbr on nbt."FGAC_REGION_CODE"=nbr."FGAC_REGION_CODE" and nbt."ABRN_BILL_RUN_NO"=nbr."BILL_RUN_NO" left join ( -- Get flag for water undertaker licences select nl."FGAC_REGION_CODE", nl."ID", right(nl."AREP_EIUC_CODE", 3)='SWC' as is_water_undertaker - from import."NALD_ABS_LICENCES" nl + from import."NALD_ABS_LICENCES" nl ) nl on nbt."FGAC_REGION_CODE"=nl."FGAC_REGION_CODE" and nbt."LIC_ID"=nl."ID" where not (nbt."FINAL_A2_BILLABLE_AMOUNT"::numeric=0 and nl.is_water_undertaker=true) ) nbt2 on nbt2."FGAC_REGION_CODE"=nbt."FGAC_REGION_CODE" and nbt2."ID"=nbt."ID" @@ -328,8 +328,8 @@ insert into water.billing_volumes ( two_part_tariff_review, is_approved, 
billing_batch_id, volume, errored_on ) -select -t.charge_element_id, +select +t.charge_element_id, i.financial_year_ending as financial_year, ntr.is_summer, null as calculated_volume, @@ -340,15 +340,15 @@ true as is_approved, b.billing_batch_id, t.volume, null as errored_on -from water.billing_batches b +from water.billing_batches b join water.billing_invoices i on b.billing_batch_id=i.billing_batch_id join water.billing_invoice_licences il on il.billing_invoice_id=i.billing_invoice_id -join water.billing_transactions t on il.billing_invoice_licence_id=t.billing_invoice_licence_id +join water.billing_transactions t on il.billing_invoice_licence_id=t.billing_invoice_licence_id join water.charge_elements ce on t.charge_element_id=ce.charge_element_id join ( - -- Get the TPT season for each charge element/financial year combination + -- Get the TPT season for each charge element/financial year combination select *, - case + case -- for Thames/Southern, summer range is 29 April to 28 November when ntr."FGAC_REGION_CODE" in ('6', '7') then daterange( @@ -365,9 +365,9 @@ join ( ) end @> ntr.latest_return_date as is_summer from ( - select - ntr."FGAC_REGION_CODE", - concat_ws(':', ntr."FGAC_REGION_CODE", ntr."ACEL_ID") as external_id, + select + ntr."FGAC_REGION_CODE", + concat_ws(':', ntr."FGAC_REGION_CODE", ntr."ACEL_ID") as external_id, ntr."FIN_YEAR"::integer, to_date(ntr."LATEST_RET_DATE", 'DD/MM/YYYY') as latest_return_date from import."NALD_TPT_RETURNS" ntr @@ -379,7 +379,7 @@ where b.source='nald' and b.batch_type='two_part_tariff'; const importBillingBatchChargeVersionYears = ` insert into water.billing_batch_charge_version_years ( billing_batch_id, charge_version_id, financial_year_ending, - date_created, date_updated, status, + date_created, date_updated, status, transaction_type, is_summer ) select distinct @@ -389,7 +389,7 @@ i.financial_year_ending, b.date_created, b.date_updated, b.status, -case +case when t.is_two_part_second_part_charge then 'two_part_tariff' else 'annual' end::water.charge_version_years_transaction_type as transaction_type, @@ -398,31 +398,31 @@ from water.billing_batches b join water.billing_invoices i on b.billing_batch_id=i.billing_batch_id join water.billing_invoice_licences il on i.billing_invoice_id=il.billing_invoice_id join water.billing_transactions t on il.billing_invoice_licence_id=t.billing_invoice_licence_id -join water.charge_elements ce on t.charge_element_id=ce.charge_element_id +join water.charge_elements ce on t.charge_element_id=ce.charge_element_id where b.source='nald' on conflict do nothing; ` const removeConstraints = ` -alter table water.billing_invoices +alter table water.billing_invoices drop constraint fk_original_billing_invoice_id; alter table water.billing_transactions drop constraint billing_transactions_billing_invoice_licence_id_fkey, drop constraint billing_transactions_billing_transactions_fk_source_transaction_id;` -const addConstraints = ` -alter table water.billing_invoices -add constraint fk_original_billing_invoice_id -foreign key (original_billing_invoice_id) +const addConstraints = ` +alter table water.billing_invoices +add constraint fk_original_billing_invoice_id +foreign key (original_billing_invoice_id) references water.billing_invoices (billing_invoice_id); alter table water.billing_transactions -ADD constraint billing_transactions_billing_invoice_licence_id_fkey +ADD constraint billing_transactions_billing_invoice_licence_id_fkey foreign key (billing_invoice_licence_id) references water.billing_invoice_licences 
(billing_invoice_licence_id), ADD constraint billing_transactions_billing_transactions_fk_source_transaction_id - foreign key (source_transaction_id) + foreign key (source_transaction_id) references water.billing_transactions (billing_transaction_id);` module.exports = { diff --git a/src/modules/bill-runs/process-steps.js b/src/modules/bill-runs/process-steps.js new file mode 100644 index 00000000..aa7f5376 --- /dev/null +++ b/src/modules/bill-runs/process-steps.js @@ -0,0 +1,29 @@ +'use strict' + +const ImportStep = require('./steps/import.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + + try { + global.GlobalNotifier.omg('bill-runs started') + + const startTime = currentTimeInNanoseconds() + + await ImportStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'bill-runs complete') + } catch (error) { + global.GlobalNotifier.oops('bill-runs failed') + } + + return processComplete +} + +module.exports = { + go +} diff --git a/src/modules/bill-runs/routes.js b/src/modules/bill-runs/routes.js new file mode 100644 index 00000000..1ee8a656 --- /dev/null +++ b/src/modules/bill-runs/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controller = require('./controller') + +const routes = [ + { + method: 'post', + handler: controller.billRuns, + path: '/bill-runs' + } +] + +module.exports = routes diff --git a/src/modules/bill-runs/steps/import.js b/src/modules/bill-runs/steps/import.js new file mode 100644 index 00000000..cc050e01 --- /dev/null +++ b/src/modules/bill-runs/steps/import.js @@ -0,0 +1,33 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const Queries = require('../lib/queries.js') + +async function go () { + try { + global.GlobalNotifier.omg('bill-runs.import started') + + const startTime = currentTimeInNanoseconds() + + await db.query(Queries.removeConstraints) + await db.query(Queries.importNaldBillRuns) + await db.query(Queries.importNaldBillHeaders) + await db.query(Queries.importInvoiceLicences) + await db.query(Queries.importTransactions) + await db.query(Queries.resetIsSecondPartChargeFlag) + await db.query(Queries.setIsSecondPartChargeFlag) + await db.query(Queries.importBillingVolumes) + await db.query(Queries.importBillingBatchChargeVersionYears) + await db.query(Queries.addConstraints) + + calculateAndLogTimeTaken(startTime, 'bill-runs.import complete') + } catch (error) { + global.GlobalNotifier.omfg('bill-runs.import errored', error) + throw error + } +} + +module.exports = { + go +} diff --git a/src/modules/charge-versions/controllers.js b/src/modules/charge-versions/controllers.js new file mode 100644 index 00000000..bef4279a --- /dev/null +++ b/src/modules/charge-versions/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function chargeVersions (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + chargeVersions +} diff --git a/src/modules/charging-import/lib/job.js b/src/modules/charge-versions/lib/job.js similarity index 100% rename from src/modules/charging-import/lib/job.js rename to src/modules/charge-versions/lib/job.js diff --git a/src/modules/charging-import/lib/queries/charge-versions-metadata.js b/src/modules/charge-versions/lib/queries/charge-versions-metadata.js similarity index 100% rename from 
src/modules/charging-import/lib/queries/charge-versions-metadata.js rename to src/modules/charge-versions/lib/queries/charge-versions-metadata.js diff --git a/src/modules/charging-import/lib/queries/charge-versions.js b/src/modules/charge-versions/lib/queries/charge-versions.js similarity index 100% rename from src/modules/charging-import/lib/queries/charge-versions.js rename to src/modules/charge-versions/lib/queries/charge-versions.js diff --git a/src/modules/charging-import/lib/queries/charging.js b/src/modules/charge-versions/lib/queries/charging.js similarity index 100% rename from src/modules/charging-import/lib/queries/charging.js rename to src/modules/charge-versions/lib/queries/charging.js diff --git a/src/modules/charging-import/lib/query-loader.js b/src/modules/charge-versions/lib/query-loader.js similarity index 100% rename from src/modules/charging-import/lib/query-loader.js rename to src/modules/charge-versions/lib/query-loader.js diff --git a/src/modules/nald-import/lib/db.js b/src/modules/charge-versions/lib/transform-permit/db.js similarity index 89% rename from src/modules/nald-import/lib/db.js rename to src/modules/charge-versions/lib/transform-permit/db.js index cd6a9f68..aaf379d5 100644 --- a/src/modules/nald-import/lib/db.js +++ b/src/modules/charge-versions/lib/transform-permit/db.js @@ -1,4 +1,4 @@ -const { pool } = require('../../../lib/connectors/db') +const { pool } = require('../../../../lib/connectors/db.js') /** * Perform a database query by getting a client from the connection pool and releasing diff --git a/src/modules/nald-import/lib/end-date.js b/src/modules/charge-versions/lib/transform-permit/end-date.js similarity index 100% rename from src/modules/nald-import/lib/end-date.js rename to src/modules/charge-versions/lib/transform-permit/end-date.js diff --git a/src/modules/nald-import/lib/nald-queries/addresses.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/addresses.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/addresses.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/addresses.js diff --git a/src/modules/nald-import/lib/nald-queries/cache.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/cache.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/cache.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/cache.js diff --git a/src/modules/nald-import/lib/nald-queries/cams.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/cams.js similarity index 90% rename from src/modules/nald-import/lib/nald-queries/cams.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/cams.js index 72ee51b1..b9b990e3 100644 --- a/src/modules/nald-import/lib/nald-queries/cams.js +++ b/src/modules/charge-versions/lib/transform-permit/nald-queries/cams.js @@ -1,6 +1,6 @@ 'use strict' -const server = require('../../../../../server') +const server = require('../../../../../../server.js') const db = require('../db') const sql = require('./sql/cams') const cache = require('./cache') diff --git a/src/modules/nald-import/lib/nald-queries/core.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/core.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/core.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/core.js diff --git a/src/modules/nald-import/lib/nald-queries/licences.js 
b/src/modules/charge-versions/lib/transform-permit/nald-queries/licences.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/licences.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/licences.js diff --git a/src/modules/nald-import/lib/nald-queries/parties.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/parties.js similarity index 95% rename from src/modules/nald-import/lib/nald-queries/parties.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/parties.js index 770ecfad..8940153e 100644 --- a/src/modules/nald-import/lib/nald-queries/parties.js +++ b/src/modules/charge-versions/lib/transform-permit/nald-queries/parties.js @@ -1,6 +1,6 @@ 'use strict' -const server = require('../../../../../server') +const server = require('../../../../../../server.js') const db = require('../db') const cache = require('./cache') const sql = require('./sql/parties') diff --git a/src/modules/nald-import/lib/nald-queries/purposes.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/purposes.js similarity index 97% rename from src/modules/nald-import/lib/nald-queries/purposes.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/purposes.js index 7e98d718..4a594dd5 100644 --- a/src/modules/nald-import/lib/nald-queries/purposes.js +++ b/src/modules/charge-versions/lib/transform-permit/nald-queries/purposes.js @@ -1,6 +1,6 @@ 'use strict' -const server = require('../../../../../server') +const server = require('../../../../../../server.js') const db = require('../db') const sql = require('./sql/purposes') const cache = require('./cache') diff --git a/src/modules/nald-import/lib/nald-queries/returns.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/returns.js similarity index 98% rename from src/modules/nald-import/lib/nald-queries/returns.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/returns.js index 9b3478ca..2cb2e6fb 100644 --- a/src/modules/nald-import/lib/nald-queries/returns.js +++ b/src/modules/charge-versions/lib/transform-permit/nald-queries/returns.js @@ -1,6 +1,6 @@ 'use strict' -const server = require('../../../../../server') +const server = require('../../../../../../server.js') const moment = require('moment') const db = require('../db') const cache = require('./cache') diff --git a/src/modules/nald-import/lib/nald-queries/roles.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/roles.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/roles.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/roles.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/addresses.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/addresses.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/addresses.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/addresses.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/cams.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/cams.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/cams.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/cams.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/core.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/core.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/core.js 
rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/core.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/licences.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/licences.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/licences.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/licences.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/parties.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/parties.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/parties.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/parties.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/purposes.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/purposes.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/purposes.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/purposes.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/returns.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/returns.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/returns.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/returns.js diff --git a/src/modules/nald-import/lib/nald-queries/sql/roles.js b/src/modules/charge-versions/lib/transform-permit/nald-queries/sql/roles.js similarity index 100% rename from src/modules/nald-import/lib/nald-queries/sql/roles.js rename to src/modules/charge-versions/lib/transform-permit/nald-queries/sql/roles.js diff --git a/src/modules/nald-import/transform-permit.js b/src/modules/charge-versions/lib/transform-permit/transform-permit.js similarity index 93% rename from src/modules/nald-import/transform-permit.js rename to src/modules/charge-versions/lib/transform-permit/transform-permit.js index 72b5c567..a4e7321e 100644 --- a/src/modules/nald-import/transform-permit.js +++ b/src/modules/charge-versions/lib/transform-permit/transform-permit.js @@ -1,16 +1,16 @@ const dates = require('@envage/water-abstraction-helpers').nald.dates -const licenceQueries = require('./lib/nald-queries/licences') -const partyQueries = require('./lib/nald-queries/parties') -const addressQueries = require('./lib/nald-queries/addresses') -const rolesQueries = require('./lib/nald-queries/roles') -const purposesQueries = require('./lib/nald-queries/purposes') +const licenceQueries = require('./nald-queries/licences') +const partyQueries = require('./nald-queries/parties') +const addressQueries = require('./nald-queries/addresses') +const rolesQueries = require('./nald-queries/roles') +const purposesQueries = require('./nald-queries/purposes') -const cams = require('./lib/nald-queries/cams') +const cams = require('./nald-queries/cams') -const { getEndDate } = require('./lib/end-date') +const { getEndDate } = require('./end-date') -const { getFormatPoints, getFormatPurposes } = require('./lib/nald-queries/returns') +const { getFormatPoints, getFormatPurposes } = require('./nald-queries/returns') /** * Gets the purposes together with their points, agreements and conditions diff --git a/src/modules/charging-import/mappers/charge-versions.js b/src/modules/charge-versions/mappers/charge-versions.js similarity index 100% rename from src/modules/charging-import/mappers/charge-versions.js rename to src/modules/charge-versions/mappers/charge-versions.js 
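These renames relocate the old nald-import transform-permit helpers under src/modules/charge-versions/lib/transform-permit without changing their behaviour, so the only in-file edits are the relative require paths, which gain one extra '../' because each helper now sits one directory deeper. A minimal sketch of the pattern (paths illustrative of the moves above):

// before the move, from src/modules/nald-import/lib/nald-queries/cams.js
const server = require('../../../../../server')

// after the move, from src/modules/charge-versions/lib/transform-permit/nald-queries/cams.js
const server = require('../../../../../../server.js')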
diff --git a/src/modules/charge-versions/process-steps.js b/src/modules/charge-versions/process-steps.js new file mode 100644 index 00000000..f6db13d6 --- /dev/null +++ b/src/modules/charge-versions/process-steps.js @@ -0,0 +1,51 @@ +'use strict' + +const db = require('../../lib/connectors/db.js') +const chargingQueries = require('./lib/queries/charging') +const chargeVersionMetadataImportService = require('./services/charge-version-metadata-import.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') +const queryLoader = require('./lib/query-loader') +const transformPermit = require('./lib/transform-permit/transform-permit.js') + +async function go () { + let processComplete = false + + try { + global.GlobalNotifier.omg('charge-versions started') + + const startTime = currentTimeInNanoseconds() + + const licenceNumbers = await _licenceNumbers() + + for (const licenceNumber of licenceNumbers) { + const licenceData = await transformPermit.getLicenceJson(licenceNumber) + + await chargeVersionMetadataImportService.importChargeVersionMetadataForLicence(licenceData) + } + + await queryLoader.loadQueries([ + chargingQueries.importChargeVersions, + chargingQueries.importChargeElements, + chargingQueries.cleanupChargeElements, + chargingQueries.cleanupChargeVersions + ]) + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'charge-versions complete') + } catch (error) { + global.GlobalNotifier.oops('charge-versions failed') + } + + return processComplete +} + +async function _licenceNumbers () { + const query = 'SELECT "LIC_NO" FROM "import"."NALD_ABS_LICENCES";' + + return db.query(query) +} + +module.exports = { + go +} diff --git a/src/modules/charge-versions/routes.js b/src/modules/charge-versions/routes.js new file mode 100644 index 00000000..c0890400 --- /dev/null +++ b/src/modules/charge-versions/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers') + +const routes = [ + { + method: 'post', + handler: controllers.chargeVersions, + path: '/charge-versions' + } +] + +module.exports = routes diff --git a/src/modules/charging-import/services/charge-version-metadata-import.js b/src/modules/charge-versions/services/charge-version-metadata-import.js similarity index 100% rename from src/modules/charging-import/services/charge-version-metadata-import.js rename to src/modules/charge-versions/services/charge-version-metadata-import.js diff --git a/src/modules/charging-import/controller.js b/src/modules/charging-import/controller.js deleted file mode 100644 index bcc71698..00000000 --- a/src/modules/charging-import/controller.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict' - -const chargeVersionsJob = require('./jobs/charge-versions') -const chargingDataJob = require('./jobs/charging-data.js') - -/** - * Run SQL queries to import charge versions / elements into - * water service tables from NALD import tables - * @return {Promise} - */ -const postImportChargeVersions = async (request, h) => { - await request.messageQueue.deleteQueue(chargeVersionsJob.jobName) - await request.messageQueue.publish(chargeVersionsJob.createMessage()) - - return h.response().code(204) -} - -const postImportChargingData = async (request, h) => { - await request.messageQueue.deleteQueue(chargingDataJob.jobName) - await request.messageQueue.publish(chargingDataJob.createMessage()) - - return h.response().code(204) -} - -module.exports = { - postImportChargeVersions, - postImportChargingData -} diff --git 
a/src/modules/charging-import/jobs/charge-versions.js b/src/modules/charging-import/jobs/charge-versions.js deleted file mode 100644 index 9c81a9b6..00000000 --- a/src/modules/charging-import/jobs/charge-versions.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict' - -/** - * Creates new PRESROC `water.charge_versions` records or updates existing ones based on `import.NALD_CHG_VERSIONS` - * @module ChargeVersionsJob - * - * The job was written as a one-off to create the charge version records WRLS billing needed when it first went live. - * It was written at a time there was only the PRESROC scheme. - * - * Now we have SROC and this process (creating the `water_import.charge_versions_metadata` source data and then - * updating `water.charge_versions` using it) has no knowledge of it. Nor does NALD for that matter. A completely - * different one-off mechanism (the charge version upload in water-abstraction-service) was built to handle creating - * the SROC charge versions needed to support SROC billing. - * - * When devs and testers build or rebuild a local environment you still need to manually trigger this redundant import - * job to create your base PRESROC charge versions. But it only deals with PRESROC. Any licences with a start date - * after 2022-04-01 will need you to manually create their charge versions. - */ - -const chargingQueries = require('../lib/queries/charging') -const chargeVersionMetadataImportService = require('../services/charge-version-metadata-import.js') -const importService = require('../../../lib/services/import.js') -const job = require('../lib/job') -const queryLoader = require('../lib/query-loader') -const transformPermit = require('../../nald-import/transform-permit.js') - -const jobName = 'import.charge-versions' - -const createMessage = () => job.createMessage(jobName) - -const handler = async () => { - try { - global.GlobalNotifier.omg('import.charge-versions: started') - - const licenceNumbers = await importService.getLicenceNumbers() - - for (const licenceNumber of licenceNumbers) { - const licenceData = await transformPermit.getLicenceJson(licenceNumber) - - await chargeVersionMetadataImportService.importChargeVersionMetadataForLicence(licenceData) - } - - await queryLoader.loadQueries([ - chargingQueries.importChargeVersions, - chargingQueries.importChargeElements, - chargingQueries.cleanupChargeElements, - chargingQueries.cleanupChargeVersions - ]) - global.GlobalNotifier.omg('import.charge-versions: finished') - } catch (error) { - global.GlobalNotifier.omfg('import.charge-versions: errored', error) - throw error - } -} - -module.exports = { - jobName, - createMessage, - handler -} diff --git a/src/modules/charging-import/jobs/charging-data.js b/src/modules/charging-import/jobs/charging-data.js deleted file mode 100644 index cf32182f..00000000 --- a/src/modules/charging-import/jobs/charging-data.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict' - -/** - * @note: this needs to remain and should not be deleted - */ - -const job = require('../lib/job') -const queryLoader = require('../lib/query-loader') -const purposesQueries = require('../lib/queries/purposes') -const financialAgreementTypeQueries = require('../lib/queries/financial-agreement-types') - -const jobName = 'import.charging-data' - -const createMessage = () => job.createMessage(jobName) - -const handler = async () => { - try { - global.GlobalNotifier.omg('import.charging-data: started') - - await queryLoader.loadQueries([ - financialAgreementTypeQueries.importFinancialAgreementTypes, - 
purposesQueries.importPrimaryPurposes, - purposesQueries.importSecondaryPurposes, - purposesQueries.importUses, - purposesQueries.importValidPurposeCombinations - ]) - - global.GlobalNotifier.omg('import.charging-data: finished') - } catch (error) { - global.GlobalNotifier.omfg('import.charging-data: errored', error) - throw error - } -} - -module.exports = { - jobName, - createMessage, - handler -} diff --git a/src/modules/charging-import/plugin.js b/src/modules/charging-import/plugin.js deleted file mode 100644 index e977cfe7..00000000 --- a/src/modules/charging-import/plugin.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const chargeVersionsJob = require('./jobs/charge-versions') -const chargingDataJob = require('./jobs/charging-data') - -const registerSubscribers = async server => { - // Register handlers - await server.messageQueue.subscribe(chargeVersionsJob.jobName, chargeVersionsJob.handler) - await server.messageQueue.subscribe(chargingDataJob.jobName, chargingDataJob.handler) -} - -const plugin = { - name: 'importChargingData', - dependencies: ['pgBoss'], - register: server => registerSubscribers(server) -} - -module.exports = { - plugin -} diff --git a/src/modules/charging-import/routes.js b/src/modules/charging-import/routes.js deleted file mode 100644 index 62f8ab9f..00000000 --- a/src/modules/charging-import/routes.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -const controller = require('./controller') - -module.exports = [ - { - method: 'post', - handler: controller.postImportChargeVersions, - path: '/import/charge-versions' - }, - { - method: 'post', - handler: controller.postImportChargingData, - path: '/import/charging-data' - } -] diff --git a/src/modules/clean/controllers.js b/src/modules/clean/controllers.js new file mode 100644 index 00000000..70980d65 --- /dev/null +++ b/src/modules/clean/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function clean (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + clean +} diff --git a/src/modules/clean/process-steps.js b/src/modules/clean/process-steps.js new file mode 100644 index 00000000..f8c32696 --- /dev/null +++ b/src/modules/clean/process-steps.js @@ -0,0 +1,33 @@ +'use strict' + +const DocumentsStep = require('./steps/documents.js') +const DocumentHeadersStep = require('./steps/document-headers.js') +const ReturnVersionsStep = require('./steps/return-versions.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + + try { + global.GlobalNotifier.omg('clean started') + + const startTime = currentTimeInNanoseconds() + + await DocumentsStep.go() + await DocumentHeadersStep.go() + await ReturnVersionsStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'clean complete') + } catch (error) { + global.GlobalNotifier.oops('clean failed') + } + + return processComplete +} + +module.exports = { + go +} diff --git a/src/modules/clean/routes.js b/src/modules/clean/routes.js new file mode 100644 index 00000000..c076a56e --- /dev/null +++ b/src/modules/clean/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers.js') + +const routes = [ + { + method: 'post', + handler: controllers.clean, + path: '/clean' + } +] + +module.exports = routes diff --git a/src/modules/clean/steps/document-headers.js b/src/modules/clean/steps/document-headers.js new file mode 100644 
index 00000000..4b2dea23 --- /dev/null +++ b/src/modules/clean/steps/document-headers.js @@ -0,0 +1,40 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') + +async function go () { + try { + global.GlobalNotifier.omg('clean.document-headers started') + + const startTime = currentTimeInNanoseconds() + + await _clean() + + calculateAndLogTimeTaken(startTime, 'clean.document-headers complete') + } catch (error) { + global.GlobalNotifier.omfg('clean.document-headers errored', error) + throw error + } +} + +async function _clean () { + return db.query(` + UPDATE + crm.document_header + SET + date_deleted = now() + WHERE + system_external_id NOT IN ( + SELECT + l."LIC_NO" + FROM + "import"."NALD_ABS_LICENCES" l + ) + AND date_deleted IS NULL; + `) +} + +module.exports = { + go +} diff --git a/src/modules/clean/steps/documents.js b/src/modules/clean/steps/documents.js new file mode 100644 index 00000000..799b8039 --- /dev/null +++ b/src/modules/clean/steps/documents.js @@ -0,0 +1,39 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') + +async function go () { + try { + global.GlobalNotifier.omg('clean.documents started') + + const startTime = currentTimeInNanoseconds() + + await _clean() + + calculateAndLogTimeTaken(startTime, 'clean.documents complete') + } catch (error) { + global.GlobalNotifier.omfg('clean.documents errored', error) + throw error + } +} + +async function _clean () { + return db.query(` + UPDATE + crm_v2.documents + SET + date_deleted = now() + WHERE + document_ref NOT IN ( + SELECT + l."LIC_NO" + FROM import."NALD_ABS_LICENCES" l + ) + AND date_deleted IS NULL; + `) +} + +module.exports = { + go +} diff --git a/src/modules/clean/steps/return-versions.js b/src/modules/clean/steps/return-versions.js new file mode 100644 index 00000000..b00d9042 --- /dev/null +++ b/src/modules/clean/steps/return-versions.js @@ -0,0 +1,165 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') + +async function go () { + try { + global.GlobalNotifier.omg('clean.return-versions started') + + const startTime = currentTimeInNanoseconds() + + // Delete any return requirement points linked to deleted NALD return requirements + await _cleanPoints() + + // Delete any return requirement purposes linked to deleted NALD return requirements + await _cleanPurposes() + + // Delete any return requirements linked to deleted NALD return requirements + await _cleanRequirements() + + // Delete any return versions that have no return requirements and that are linked to deleted return versions + await _cleanVersions() + + // Update the mod logs to remove the return version ID for where the return version has now been deleted + await _cleanModLogs() + + calculateAndLogTimeTaken(startTime, 'clean.return-versions complete') + } catch (error) { + global.GlobalNotifier.omfg('clean.return-versions errored', error) + throw error + } +} + +async function _cleanPoints () { + return db.query(` + WITH nald_return_requirements AS ( + SELECT concat_ws(':', nrf."FGAC_REGION_CODE", nrf."ID") AS nald_id + FROM "import"."NALD_RET_FORMATS" nrf + ) + DELETE FROM water.return_requirement_points rrp WHERE rrp.return_requirement_id IN ( + SELECT + rr.return_requirement_id + FROM + 
water.return_requirements rr + WHERE + NOT EXISTS ( + SELECT 1 + FROM nald_return_requirements nrr + WHERE rr.external_id = nrr.nald_id + ) + AND NOT EXISTS ( + SELECT 1 + FROM "returns"."returns" rl + WHERE + rl.return_requirement = rr.legacy_id::varchar + LIMIT 1 + ) + ); + `) +} + +async function _cleanPurposes () { + return db.query(` + WITH nald_return_requirements AS ( + SELECT concat_ws(':', nrf."FGAC_REGION_CODE", nrf."ID") AS nald_id + FROM "import"."NALD_RET_FORMATS" nrf + ) + DELETE FROM water.return_requirement_purposes rrp WHERE rrp.return_requirement_id IN ( + SELECT + rr.return_requirement_id + FROM + water.return_requirements rr + WHERE + NOT EXISTS ( + SELECT 1 + FROM nald_return_requirements nrr + WHERE rr.external_id = nrr.nald_id + ) + AND NOT EXISTS ( + SELECT 1 + FROM "returns"."returns" rl + WHERE + rl.return_requirement = rr.legacy_id::varchar + LIMIT 1 + ) + ); + `) +} + +async function _cleanRequirements () { + return db.query(` + WITH nald_return_requirements AS ( + SELECT concat_ws(':', nrf."FGAC_REGION_CODE", nrf."ID") AS nald_id + FROM "import"."NALD_RET_FORMATS" nrf + ) + DELETE FROM water.return_requirements WHERE return_requirement_id IN ( + SELECT + rr.return_requirement_id + FROM + water.return_requirements rr + WHERE + NOT EXISTS ( + SELECT 1 + FROM nald_return_requirements nrr + WHERE rr.external_id = nrr.nald_id + ) + AND NOT EXISTS ( + SELECT 1 + FROM "returns"."returns" rl + WHERE + rl.return_requirement = rr.legacy_id::varchar + LIMIT 1 + ) + ); + `) +} + +async function _cleanVersions () { + return db.query(` + WITH nald_return_versions AS ( + SELECT concat_ws(':', nv."FGAC_REGION_CODE", nv."AABL_ID", nv."VERS_NO") AS nald_id + FROM "import"."NALD_RET_VERSIONS" nv + ) + DELETE FROM water.return_versions WHERE return_version_id IN ( + SELECT + rv.return_version_id + FROM + water.return_versions rv + WHERE + NOT EXISTS ( + SELECT 1 + FROM nald_return_versions nrv + WHERE rv.external_id = nrv.nald_id + ) + AND NOT EXISTS ( + SELECT 1 + FROM water.return_requirements rr + WHERE + rr.return_version_id = rv.return_version_id + LIMIT 1 + ) + ); + `) +} + +async function _cleanModLogs () { + return db.query(` + UPDATE + water.mod_logs ml + SET + return_version_id = NULL + WHERE + ml.return_version_id IS NOT NULL + AND ml.return_version_id NOT IN ( + SELECT + rv.return_version_id + FROM + water.return_versions rv + ); + `) +} + +module.exports = { + go +} diff --git a/src/modules/company-details/controllers.js b/src/modules/company-details/controllers.js new file mode 100644 index 00000000..6de1ef39 --- /dev/null +++ b/src/modules/company-details/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function companyDetails (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + companyDetails +} diff --git a/src/modules/company-details/lib/fetcher.js b/src/modules/company-details/lib/fetcher.js new file mode 100644 index 00000000..4e81c1a7 --- /dev/null +++ b/src/modules/company-details/lib/fetcher.js @@ -0,0 +1,102 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function go (regionCode, partyId) { + const licenceVersions = await _licenceVersions(regionCode, partyId) + const licenceRoles = await _licenceRoles(regionCode, partyId) + + const addressIds = _addressIds(licenceVersions, licenceRoles) + + const addresses = await _addresses(regionCode, addressIds) + + return { addresses, licenceVersions, licenceRoles } +} + +function 
_addressIds (licenceVersions, licenceRoles) { + const allAddressIds = [] + + licenceVersions.forEach((record) => { + allAddressIds.push(record.ACON_AADD_ID) + }) + + licenceRoles.forEach((record) => { + allAddressIds.push(record.ACON_AADD_ID) + }) + + // Creates a distinct array of address IDs + return [...new Set(allAddressIds)] +} + +async function _addresses (regionCode, addressIds) { + const query = ` + SELECT + "ID", + "ADDR_LINE1", + "ADDR_LINE2", + "ADDR_LINE3", + "ADDR_LINE4", + "TOWN", + "COUNTY", + "POSTCODE", + "COUNTRY", + "FGAC_REGION_CODE" + FROM + "import"."NALD_ADDRESSES" + WHERE + "FGAC_REGION_CODE" = $1 + AND "ID" = ANY (string_to_array($2, ',')::TEXT[]); + ` + + return db.query(query, [regionCode, addressIds.join(',')]) +} + +async function _licenceRoles (regionCode, partyId) { + const query = ` + SELECT + "ID", + "ALRT_CODE", + "ACON_APAR_ID", + "ACON_AADD_ID", + "EFF_ST_DATE", + "EFF_END_DATE", + "FGAC_REGION_CODE" + FROM + "import"."NALD_LIC_ROLES" + WHERE + "FGAC_REGION_CODE"=$1 + AND "ACON_APAR_ID"=$2; + ` + + return db.query(query, [regionCode, partyId]) +} + +async function _licenceVersions (regionCode, partyId) { + const query = ` + SELECT + lv."AABL_ID", + lv."EFF_ST_DATE", + lv."ACON_APAR_ID", + lv."ACON_AADD_ID", + lv."EFF_END_DATE", + lv."FGAC_REGION_CODE", + l."REV_DATE", + l."LAPSED_DATE", + l."EXPIRY_DATE" + FROM + "import"."NALD_ABS_LIC_VERSIONS" lv + JOIN "import"."NALD_ABS_LICENCES" l ON + lv."AABL_ID" = l."ID" + AND lv."FGAC_REGION_CODE" = l."FGAC_REGION_CODE" + WHERE + lv."FGAC_REGION_CODE" = $1 + AND lv."ACON_APAR_ID" = $2 + AND lv."STATUS" <> 'DRAFT'; + ` + + return db.query(query, [regionCode, partyId]) +} + +module.exports = { + go +} diff --git a/src/modules/company-details/lib/loader.js b/src/modules/company-details/lib/loader.js new file mode 100644 index 00000000..da25e007 --- /dev/null +++ b/src/modules/company-details/lib/loader.js @@ -0,0 +1,346 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const Fetcher = require('./fetcher.js') +const Transformer = require('./transformer.js') + +async function go (party) { + const { FGAC_REGION_CODE: regionCode, ID: partyId } = party + + try { + const { addresses, licenceRoles, licenceVersions } = await Fetcher.go(regionCode, partyId) + + const transformedPartyData = Transformer.go(party, licenceVersions, licenceRoles, addresses) + + await _persistCompany(transformedPartyData) + await _persistAddresses(transformedPartyData) + await _persistLicenceHolderContact(transformedPartyData.licenceHolderContact) + await _persistCompanyContact(transformedPartyData) + await _persistCompanyAddresses(transformedPartyData) + } catch (error) { + global.GlobalNotifier.omfg('company-details.import errored', error, { partyId, regionCode }) + throw error + } +} + +async function _persistAddresses (transformedPartyData) { + const allRoleAddresses = transformedPartyData.roleAddresses.map((roleAddress) => { + return roleAddress.address + }) + + // Create a set () unique list of all the role addresses. The second arg is a function the set will use to uniquely + // identify each role address. This is because a roleAddress has the following schema + // + // { + // role: 'licenceHolder', + // startDate: '1967-06-01', + // endDate: '1967-10-01', + // address: { // Address properties } + // } + // + // There might be two role addresses, for example, licence holder and returns to, both of which link to the same + // address. 
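// The comment above describes the intent: a list of unique role addresses.
// Note that the native Set constructor only accepts an iterable (any extra
// arguments are ignored), so the construction below dedupes by object
// identity rather than by externalId. A sketch of one way to dedupe on
// externalId instead (illustrative only, not part of this change):
//
//   const uniqueByExternalId = [
//     ...new Map(
//       allRoleAddresses.map((address) => [address.externalId, address])
//     ).values()
//   ]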
+ const addressesSet = new Set( + allRoleAddresses, + (roleAddress) => { + return roleAddress.externalId + } + ) + + const uniqueAddresses = [...addressesSet] + + for (const address of uniqueAddresses) { + await _persistAddress(address) + } +} + +async function _persistAddress (address) { + const params = [ + address.address1, + address.address2, + address.address3, + address.address4, + address.town, + address.county, + address.postcode, + address.country, + address.externalId + ] + const query = ` + INSERT INTO crm_v2.addresses ( + address_1, + address_2, + address_3, + address_4, + town, + county, + postcode, + country, + external_id, + data_source, + date_created, + date_updated, + current_hash + ) + VALUES ( + $1, + $2, + $3, + $4, + $5, + $6, + $7, + $8, + $9, + 'nald', + NOW(), + NOW(), + md5( + CONCAT( + $1::varchar, + $2::varchar, + $3::varchar, + $4::varchar, + $5::varchar, + $6::varchar, + $7::varchar + )::varchar + ) + ) ON + CONFLICT (external_id) DO UPDATE + SET + address_1 = EXCLUDED.address_1, + address_2 = EXCLUDED.address_2, + address_3 = EXCLUDED.address_3, + address_4 = EXCLUDED.address_4, + town = EXCLUDED.town, + county = EXCLUDED.county, + postcode = EXCLUDED.postcode, + country = EXCLUDED.country, + last_hash = EXCLUDED.current_hash, + current_hash = md5( + CONCAT( + EXCLUDED.address_1::varchar, + EXCLUDED.address_2::varchar, + EXCLUDED.address_3::varchar, + EXCLUDED.address_4::varchar, + EXCLUDED.town::varchar, + EXCLUDED.county::varchar, + EXCLUDED.postcode::varchar + )::varchar + ), + date_updated = EXCLUDED.date_updated; + ` + + return db.query(query, params) +} + +async function _persistCompanyAddress (roleAddress, companyExternalId) { + const params = [companyExternalId, roleAddress.address.externalId, roleAddress.role, roleAddress.startDate, roleAddress.endDate] + const query = ` + INSERT INTO crm_v2.company_addresses ( + company_id, + address_id, + role_id, + start_date, + end_date, + is_default, + date_created, + date_updated + ) + SELECT + c.company_id, + a.address_id, + r.role_id, + $4, + $5, + TRUE, + NOW(), + NOW() + FROM + crm_v2.companies c + JOIN crm_v2.addresses a + ON a.external_id = $2 + JOIN crm_v2.roles r + ON r.name = $3 + WHERE + c.external_id = $1 + ON CONFLICT ( + company_id, + address_id, + role_id + ) DO UPDATE + SET + address_id = EXCLUDED.address_id, + is_default = EXCLUDED.is_default, + end_date = EXCLUDED.end_date, + date_updated = EXCLUDED.date_updated; + ` + + return db.query(query, params) +} + +async function _persistCompanyAddresses (transformedPartyData) { + for (const roleAddress of transformedPartyData.roleAddresses) { + await _persistCompanyAddress(roleAddress, transformedPartyData.externalId) + } +} + +async function _persistLicenceHolderContact (licenceHolderContact) { + if (!licenceHolderContact) { + return null + } + + const { externalId, firstName, initials, lastName, salutation } = licenceHolderContact.contact + const params = [salutation, initials, firstName, lastName, externalId] + const query = ` + INSERT INTO crm_v2.contacts ( + salutation, + initials, + first_name, + last_name, + external_id, + data_source, + date_created, + date_updated, + current_hash + ) + VALUES ( + $1, + $2, + $3, + $4, + $5, + 'nald', + NOW(), + NOW(), + md5( + CONCAT( + $1::varchar, + $3::varchar, + $4::varchar + )::varchar + ) + ) ON CONFLICT (external_id) DO UPDATE + SET + salutation = EXCLUDED.salutation, + initials = EXCLUDED.initials, + first_name = EXCLUDED.first_name, + last_name = EXCLUDED.last_name, + external_id = 
EXCLUDED.external_id, + date_updated = EXCLUDED.date_updated, + last_hash = EXCLUDED.current_hash, + current_hash = md5( + CONCAT( + EXCLUDED.salutation::varchar, + EXCLUDED.first_name::varchar, + EXCLUDED.last_name::varchar + )::varchar + ); + ` + + return db.query(query, params) +} + +async function _persistCompany (transformedPartyData) { + const params = [transformedPartyData.name, transformedPartyData.type, transformedPartyData.externalId] + + const query = ` + INSERT INTO crm_v2.companies ( + name, + TYPE, + external_id, + date_created, + date_updated, + current_hash + ) + VALUES ( + $1, + $2, + $3, + NOW(), + NOW(), + md5( + CONCAT( + $1::varchar, + $2::varchar + )::varchar + ) + ) + ON CONFLICT (external_id) DO UPDATE + SET + name = EXCLUDED.name, + date_updated = EXCLUDED.date_updated, + TYPE = EXCLUDED.type, + last_hash = EXCLUDED.current_hash, + current_hash = md5( + CONCAT( + EXCLUDED.name::varchar, + EXCLUDED.type::varchar + )::varchar + ); + ` + + return db.query(query, params) +} + +async function _persistCompanyContact (transformedPartyData) { + if (!transformedPartyData.licenceHolderContact) { + return null + } + + const { externalId, licenceHolderContact } = transformedPartyData + const params = [ + externalId, + licenceHolderContact.contact.externalId, + licenceHolderContact.role, + licenceHolderContact.startDate, + licenceHolderContact.endDate + ] + const query = ` + INSERT INTO crm_v2.company_contacts ( + company_id, + contact_id, + role_id, + start_date, + end_date, + is_default, + date_created, + date_updated + ) + SELECT + c.company_id, + o.contact_id, + r.role_id, + $4, + $5, + TRUE, + NOW(), + NOW() + FROM + crm_v2.companies c + JOIN crm_v2.contacts o + ON o.external_id = $2 + JOIN crm_v2.roles r + ON r.name = $3 + WHERE + c.external_id = $1 + ON CONFLICT ( + company_id, + contact_id, + role_id, + start_date + ) DO UPDATE + SET + contact_id = EXCLUDED.contact_id, + is_default = EXCLUDED.is_default, + end_date = EXCLUDED.end_date, + date_updated = EXCLUDED.date_updated; + ` + + return db.query(query, params) +} + +module.exports = { + go +} diff --git a/src/modules/company-details/lib/transformer.js b/src/modules/company-details/lib/transformer.js new file mode 100644 index 00000000..d3bac9b7 --- /dev/null +++ b/src/modules/company-details/lib/transformer.js @@ -0,0 +1,178 @@ +'use strict' + +const DateHelpers = require('../../../lib/date-helpers.js') + +function go (party, licenceVersions, licenceRoles, naldAddresses) { + const company = _company(party) + const contact = _contact(party) + const addresses = _addresses(naldAddresses) + + const licenceHolderAddresses = _licenceHolderAddresses(licenceVersions, addresses) + const licenceRoleAddresses = _licenceRoleAddresses(licenceRoles, addresses) + const licenceHolderContact = _licenceHolderContact(contact, licenceVersions) + + company.roleAddresses = [...licenceHolderAddresses, ...licenceRoleAddresses] + company.licenceHolderContact = licenceHolderContact + + return company +} + +function _address (address) { + return { + address1: _null(address.ADDR_LINE1), + address2: _null(address.ADDR_LINE2), + address3: _null(address.ADDR_LINE3), + address4: _null(address.ADDR_LINE4), + town: _null(address.TOWN), + county: _null(address.COUNTY), + postcode: _null(address.POSTCODE), + country: _null(address.COUNTRY), + externalId: `${address.FGAC_REGION_CODE}:${address.ID}` + } +} + +function _addresses (addresses) { + return addresses.map((address) => { + return _address(address) + }) +} + +function _company (party) { + const 
name = _name(party) + const type = party.APAR_TYPE === 'PER' ? 'person' : 'organisation' + + return { + type, + name, + externalId: `${party.FGAC_REGION_CODE}:${party.ID}` + } +} + +function _contact (party) { + if (party.APAR_TYPE === 'ORG') { + return null + } + + return { + salutation: _null(party.SALUTATION), + initials: _null(party.INITIALS), + firstName: _null(party.FORENAME), + lastName: _null(party.NAME), + externalId: `${party.FGAC_REGION_CODE}:${party.ID}` + } +} + +function _licenceHolderAddresses (licenceVersions, addresses) { + // Sort licence versions by start date + const sortedLicenceVersions = licenceVersions.sort((licenceVersion1, licenceVersion2) => { + const formattedStartDate1 = DateHelpers.mapNaldDate(licenceVersion1.EFF_ST_DATE) + const formattedStartDate2 = DateHelpers.mapNaldDate(licenceVersion2.EFF_ST_DATE) + + if ((licenceVersion1, formattedStartDate1) > (licenceVersion2, formattedStartDate2)) { + return 1 + } else { + return -1 + } + }) + + // Get the widest date range for each address + const mapped = {} + + for (const licenceVersion of sortedLicenceVersions) { + const id = licenceVersion.ACON_AADD_ID + const currentStart = mapped[id]?.startDate + const currentEnd = mapped[id]?.endDate + const transformedStartDate = DateHelpers.mapNaldDate(licenceVersion.EFF_ST_DATE) + const address = addresses.find((address) => { + return address.externalId === `${licenceVersion.FGAC_REGION_CODE}:${id}` + }) + + mapped[id] = { + role: 'licenceHolder', + startDate: DateHelpers.getMinDate([transformedStartDate, currentStart]), + endDate: DateHelpers.getEndDate(licenceVersion, currentEnd), + address + } + } + + return Object.values(mapped) +} + +function _licenceHolderContact (contact, licenceVersions) { + if (!contact || licenceVersions.length === 0) { + return null + } + + const startDates = licenceVersions.map((licenceVersion) => { + return DateHelpers.mapNaldDate(licenceVersion.EFF_ST_DATE) + }) + + return { + role: 'licenceHolder', + startDate: DateHelpers.getMinDate(startDates), + endDate: null, + contact + } +} + +function _licenceRoleAddresses (licenceRoles, addresses) { + // Group by roles with the same address and role + let grouped = {} + + if (licenceRoles) { + grouped = licenceRoles.reduce((group, item) => { + const groupingKey = `${item.FGAC_REGION_CODE}.${item.ACON_AADD_ID}.${item.ALRT_CODE}` + + group[groupingKey] = group[groupingKey] ?? [] + group[groupingKey].push(item) + + return group + }, {}) + } + + return Object.values(grouped).map((addressGroup) => { + const { FGAC_REGION_CODE: regionCode, ACON_AADD_ID: addressId, ALRT_CODE: roleCode } = addressGroup[0] + const startDates = addressGroup.map((row) => { + return DateHelpers.mapNaldDate(row.EFF_ST_DATE) + }) + + const endDates = addressGroup.map((row) => { + return DateHelpers.mapNaldDate(row.EFF_END_DATE) + }) + + const address = addresses.find((address) => { + return address.externalId === `${regionCode}:${addressId}` + }) + + return { + role: roleCode === 'RT' ? 'returnsTo' : null, + startDate: DateHelpers.getMinDate(startDates), + endDate: DateHelpers.getMaxDate(endDates), + address + } + }) +} + +function _name (party) { + const firstNameKey = party.FORENAME === 'null' ? 
'INITIALS' : 'FORENAME' + + const parts = [_null(party.SALUTATION), _null(party[firstNameKey]), _null(party.NAME)] + + const filteredParts = parts.filter((part) => { + return part + }) + + return filteredParts.join(' ') +} + +function _null (value) { + if (value !== 'null') { + return value + } + + return null +} + +module.exports = { + go +} diff --git a/src/modules/company-details/process-steps.js b/src/modules/company-details/process-steps.js new file mode 100644 index 00000000..c2466933 --- /dev/null +++ b/src/modules/company-details/process-steps.js @@ -0,0 +1,30 @@ +'use strict' + +const ImportStep = require('./steps/import.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + let counts = {} + + try { + global.GlobalNotifier.omg('company-details started') + + const startTime = currentTimeInNanoseconds() + + counts = await ImportStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'company-details complete') + } catch (error) { + global.GlobalNotifier.oops('company-details failed') + } + + return { processComplete, counts } +} + +module.exports = { + go +} diff --git a/src/modules/company-details/routes.js b/src/modules/company-details/routes.js new file mode 100644 index 00000000..f04a9062 --- /dev/null +++ b/src/modules/company-details/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers.js') + +const routes = [ + { + method: 'post', + handler: controllers.companyDetails, + path: '/company-details' + } +] + +module.exports = routes diff --git a/src/modules/company-details/steps/import.js b/src/modules/company-details/steps/import.js new file mode 100644 index 00000000..f2707856 --- /dev/null +++ b/src/modules/company-details/steps/import.js @@ -0,0 +1,85 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const Loader = require('../lib/loader.js') + +const config = require('../../../../config.js') + +const PROGRESS_TICK = 1000 + +async function go () { + let count = 0 + let rejected = 0 + + try { + global.GlobalNotifier.omg('company-details.import started') + + const startTime = currentTimeInNanoseconds() + + const parties = await _parties() + + count = parties.length + + rejected = await _import(parties, count) + + calculateAndLogTimeTaken(startTime, 'company-details.import complete', { count, rejected }) + } catch (error) { + global.GlobalNotifier.omfg('company-details.import errored', error, { count, rejected }) + throw error + } + + return { count, rejected } +} + +async function _import (parties, count) { + const batchSize = config.processBatchSize + + let progress = PROGRESS_TICK + let rejected = 0 + + for (let i = 0; i < count; i += batchSize) { + if (i === progress) { + progress = progress + PROGRESS_TICK + global.GlobalNotifier.omg(`company-details.import progress (${i} of ${count})`) + } + + const partiesToProcess = parties.slice(i, i + batchSize) + + const processes = partiesToProcess.map((partyToProcess) => { + return Loader.go(partyToProcess.FGAC_REGION_CODE, partyToProcess.ID) + }) + + const results = await Promise.allSettled(processes) + const rejectedResults = results.filter((result) => { + return result.status === 'rejected' + }) + + if (rejectedResults.length === batchSize) { + throw new Error('Whole batch rejected') + } + + rejected += rejectedResults.length + } + + return rejected 
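// A worked example of the batching loop above, assuming a configured batch
// size of 10: 2,500 parties are processed as 250 sequential slices, each run
// through Promise.allSettled; progress is logged after every 1,000 parties
// (PROGRESS_TICK), rejections are counted, and a slice in which every promise
// rejects aborts the import with 'Whole batch rejected'.
//
// Note that Loader.go (lib/loader.js) is declared as go (party) and
// destructures FGAC_REGION_CODE and ID from that single argument, so the call
// in the map above presumably intends to pass the whole row, e.g. (sketch):
//
//   return Loader.go(partyToProcess)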
+} + +async function _parties () { + return db.query(` + SELECT + "ID", + "APAR_TYPE", + "NAME", + "FORENAME", + "INITIALS", + "SALUTATION", + "FGAC_REGION_CODE" + FROM + "import"."NALD_PARTIES"; + `) +} + +module.exports = { + go +} diff --git a/src/modules/core/jobs/import-tracker.js b/src/modules/core/jobs/import-tracker.js deleted file mode 100644 index c99a6c23..00000000 --- a/src/modules/core/jobs/import-tracker.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict' - -const jobsConnector = require('../../../lib/connectors/water-import/jobs') -const notifyService = require('../../../lib/services/notify') - -const config = require('../../../../config') - -const JOB_NAME = 'import.tracker' - -const createMessage = () => ({ - name: JOB_NAME, - options: { - singletonKey: JOB_NAME - } -}) - -const handler = async () => { - try { - global.GlobalNotifier.omg('import.tracker: started') - - const jobs = await jobsConnector.getFailedJobs() - // if there are any jobs that have failed in the last 12 hours - if (jobs.length > 0) { - const subTitle = jobs.length > 1 ? `There are ${jobs.length} failed import jobs in the` : 'There is 1 failed import job in the' - const content = `${subTitle} ${config.environment} environment.\n\n` + - jobs.reduce((acc, row) => { - acc = acc + `Job Name: ${row.jobName} \nTotal Errors: ${row.total} \nDate created: ${row.dateCreated} \nDate completed: ${row.dateCompleted}\n\n` - return acc - }, '') - notifyService.sendEmail(process.env.WATER_SERVICE_MAILBOX, 'service_status_alert', { content }) - } - - global.GlobalNotifier.omg('import.tracker: finished') - } catch (error) { - global.GlobalNotifier.omfg('import.tracker: errored', error) - throw error - } -} - -module.exports = { - createMessage, - handler, - jobName: JOB_NAME -} diff --git a/src/modules/core/plugin.js b/src/modules/core/plugin.js deleted file mode 100644 index a3ab5d92..00000000 --- a/src/modules/core/plugin.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' - -const cron = require('node-cron') -const importTrackerJob = require('./jobs/import-tracker') - -const config = require('../../../config') - -const registerSubscribers = async server => { - await server.messageQueue.subscribe(importTrackerJob.jobName, importTrackerJob.handler) - - // If we're not running the unit tests, schedule the import tracker job - if (process.env.NODE_ENV !== 'test') { - cron.schedule(config.import.tracker.schedule, async () => { - await server.messageQueue.publish(importTrackerJob.createMessage()) - }) - } -} - -const plugin = { - name: 'importTracker', - dependencies: ['pgBoss'], - register: registerSubscribers -} - -module.exports = { - plugin -} diff --git a/src/modules/core/routes.js b/src/modules/core/routes.js index ad84b94e..90709cc0 100644 --- a/src/modules/core/routes.js +++ b/src/modules/core/routes.js @@ -5,20 +5,12 @@ const controller = require('./controller') const status = { method: 'GET', handler: controller.getStatus, - options: { - auth: false, - description: 'Checks if the service is alive' - }, path: '/status' } const testing = { method: 'GET', handler: controller.getStatus, - options: { - auth: false, - description: 'For testing etl path mapping' - }, path: '/etl/testing' } diff --git a/src/modules/health/routes.js b/src/modules/health/routes.js index c75eb6f8..15cb09fb 100644 --- a/src/modules/health/routes.js +++ b/src/modules/health/routes.js @@ -6,18 +6,12 @@ const routes = [ { method: 'GET', path: '/health/airbrake', - handler: controller.getAirbrake, - config: { - auth: false - } + handler: 
controller.getAirbrake }, { method: 'GET', path: '/health/info', - handler: controller.getInfo, - config: { - auth: false - } + handler: controller.getInfo } ] diff --git a/src/modules/jobs/controller.js b/src/modules/jobs/controller.js deleted file mode 100644 index 2e41b20a..00000000 --- a/src/modules/jobs/controller.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const jobsConnector = require('../../lib/connectors/water-import/jobs') - -const getJobSummary = () => jobsConnector.getJobSummary() - -module.exports = { - getJobSummary -} diff --git a/src/modules/jobs/routes.js b/src/modules/jobs/routes.js deleted file mode 100644 index 74b899c7..00000000 --- a/src/modules/jobs/routes.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const controller = require('./controller') - -const getJobSummary = { - method: 'GET', - path: '/import/1.0/jobs/summary', - handler: controller.getJobSummary -} - -module.exports = [getJobSummary] diff --git a/src/modules/licence-details/controllers.js b/src/modules/licence-details/controllers.js new file mode 100644 index 00000000..fce2a770 --- /dev/null +++ b/src/modules/licence-details/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function licenceDetails (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + licenceDetails +} diff --git a/src/modules/licence-details/lib/fetcher.js b/src/modules/licence-details/lib/fetcher.js new file mode 100644 index 00000000..0d377613 --- /dev/null +++ b/src/modules/licence-details/lib/fetcher.js @@ -0,0 +1,231 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function go (naldLicence) { + const { ID: id, FGAC_REGION_CODE: regionCode, LIC_NO: licenceRef } = naldLicence + + const naldLicenceVersions = await _licenceVersions(regionCode, id) + const naldLicenceVersionPurposes = await _licenceVersionPurposes(regionCode, id) + const naldLicenceVersionPurposeConditions = await _licenceVersionPurposeConditions(regionCode, id) + const naldLicenceRoles = await _licenceRoles(regionCode, id) + + const { addressIds, partyIds } = _addressAndPartyIds(naldLicenceVersions, naldLicenceRoles) + + const naldAddresses = await _addresses(regionCode, addressIds) + const naldParties = await _parties(regionCode, partyIds) + + const licencePriorToImport = await _licencePriorToImport(licenceRef) + + return { + licencePriorToImport, + naldAddresses, + naldLicence, + naldLicenceRoles, + naldLicenceVersions, + naldLicenceVersionPurposes, + naldLicenceVersionPurposeConditions, + naldParties + } +} + +async function _addresses (regionCode, addressIds) { + const query = ` + SELECT + "ID", + "FGAC_REGION_CODE" + FROM + "import"."NALD_ADDRESSES" + WHERE + "FGAC_REGION_CODE" = $1 + AND "ID" = ANY (string_to_array($2, ',')::TEXT[]); + ` + + return db.query(query, [regionCode, addressIds.join(',')]) +} + +function _addressAndPartyIds (licenceVersions, licenceRoles) { + const allAddressIds = [] + const allPartyIds = [] + + licenceVersions.forEach((licenceVersion) => { + allAddressIds.push(licenceVersion.ACON_AADD_ID) + allPartyIds.push(licenceVersion.ACON_APAR_ID) + }) + + licenceRoles.forEach((licenceVersion) => { + allAddressIds.push(licenceVersion.ACON_AADD_ID) + allPartyIds.push(licenceVersion.ACON_APAR_ID) + }) + + const addressIds = [...new Set(allAddressIds)] + const partyIds = [...new Set(allPartyIds)] + + return { addressIds, partyIds } +} + +async function _licencePriorToImport (licenceRef) { + const query = ` + 
SELECT + licence_id, + expired_date, + lapsed_date, + revoked_date + FROM + water.licences + WHERE + licence_ref = $1; + ` + + const results = await db.query(query, [licenceRef]) + + return results[0] +} + +async function _licenceRoles (regionCode, id) { + const query = ` + SELECT + "ID", + "FGAC_REGION_CODE", + "EFF_ST_DATE", + "EFF_END_DATE", + "ACON_APAR_ID", + "ACON_AADD_ID" + FROM + "import"."NALD_LIC_ROLES" + WHERE + "FGAC_REGION_CODE" = $1 + AND "AABL_ID" = $2 + AND "ALRT_CODE" = 'RT' + ORDER BY + to_date("EFF_ST_DATE", 'DD/MM/YYYY'); + ` + + return db.query(query, [regionCode, id]) +} + +async function _licenceVersions (regionCode, id) { + const query = ` + SELECT + "AABL_ID", + "FGAC_REGION_CODE", + "ISSUE_NO", + "INCR_NO", + "STATUS", + "EFF_ST_DATE", + "EFF_END_DATE", + "ACON_AADD_ID", + "ACON_APAR_ID" + FROM + "import"."NALD_ABS_LIC_VERSIONS" + WHERE + "FGAC_REGION_CODE"=$1 + AND "AABL_ID"=$2 + AND "STATUS"<>'DRAFT'; + ` + const results = await db.query(query, [regionCode, id]) + + if (results.length === 0) { + throw new Error('Licence has no matching licence versions') + } + + return results +} + +async function _licenceVersionPurposes (regionCode, id) { + const query = ` + SELECT + purposes."ID", + purposes."FGAC_REGION_CODE", + purposes."INST_QTY", + purposes."HOURLY_QTY", + purposes."DAILY_QTY", + purposes."ANNUAL_QTY", + purposes."AABV_ISSUE_NO", + purposes."AABV_INCR_NO", + purposes."APUR_APPR_CODE", + purposes."APUR_APSE_CODE", + purposes."APUR_APUS_CODE", + purposes."PERIOD_ST_DAY", + purposes."PERIOD_ST_MONTH", + purposes."PERIOD_END_DAY", + purposes."PERIOD_END_MONTH", + purposes."TIMELTD_ST_DATE", + purposes."TIMELTD_END_DATE", + purposes."NOTES" + FROM + "import"."NALD_ABS_LIC_VERSIONS" versions + JOIN + "import"."NALD_ABS_LIC_PURPOSES" purposes + ON + versions."AABL_ID" = purposes."AABV_AABL_ID" + AND versions."ISSUE_NO" = purposes."AABV_ISSUE_NO" + AND versions."INCR_NO" = purposes."AABV_INCR_NO" + AND versions."FGAC_REGION_CODE" = purposes."FGAC_REGION_CODE" + WHERE + versions."FGAC_REGION_CODE" = $1 + AND versions."AABL_ID" = $2 + AND versions."STATUS"<>'DRAFT'; + ` + + const results = await db.query(query, [regionCode, id]) + + if (results.length === 0) { + throw new Error('Licence has no matching licence version purposes') + } + + return results +} + +async function _licenceVersionPurposeConditions (regionCode, id) { + const query = ` + SELECT + conditions."ID", + conditions."FGAC_REGION_CODE", + conditions."AABP_ID", + conditions."ACIN_CODE", + conditions."ACIN_SUBCODE", + conditions."PARAM1", + conditions."PARAM2", + conditions."TEXT" + FROM + "import"."NALD_ABS_LIC_VERSIONS" versions + JOIN + "import"."NALD_ABS_LIC_PURPOSES" purposes + ON + versions."AABL_ID" = purposes."AABV_AABL_ID" + AND versions."ISSUE_NO" = purposes."AABV_ISSUE_NO" + AND versions."INCR_NO" = purposes."AABV_INCR_NO" + AND versions."FGAC_REGION_CODE" = purposes."FGAC_REGION_CODE" + JOIN + "import"."NALD_LIC_CONDITIONS" conditions + ON + purposes."FGAC_REGION_CODE" = conditions."FGAC_REGION_CODE" + AND purposes."ID" = conditions."AABP_ID" + WHERE + versions."FGAC_REGION_CODE" = $1 + AND versions."AABL_ID" = $2 + AND versions."STATUS"<>'DRAFT'; + ` + + return db.query(query, [regionCode, id]) +} + +async function _parties (regionCode, partyIds) { + const query = ` + SELECT + "ID", + "FGAC_REGION_CODE" + FROM + "import"."NALD_PARTIES" + WHERE + "FGAC_REGION_CODE" = $1 + AND "ID" = ANY (string_to_array($2, ',')::TEXT[]); + ` + + return db.query(query, [regionCode, partyIds.join(',')]) +} + 
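// The address and party lookups above bind the id list as a single
// comma-joined string and unpack it in SQL with string_to_array(), rather
// than building a variable-length list of placeholders. A standalone sketch
// of the same pattern (ids illustrative only):
//
//   const ids = ['100', '101', '102']
//   const rows = await db.query(
//     'SELECT "ID" FROM "import"."NALD_PARTIES" WHERE "ID" = ANY (string_to_array($1, \',\')::TEXT[]);',
//     [ids.join(',')]
//   )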
+module.exports = { + go +} diff --git a/src/modules/licence-details/lib/legacy.js b/src/modules/licence-details/lib/legacy.js new file mode 100644 index 00000000..ffa76459 --- /dev/null +++ b/src/modules/licence-details/lib/legacy.js @@ -0,0 +1,35 @@ +'use strict' + +// To be able to focus on dropping pg-boss from the import and get it running again we have not had the time to +// update all legacy transformation code. We found this gnarly piece of work and decided there would be too much risk +// to attempt to re-write it in the time we have. +function isLicenceVersionReplaced (licenceVersion, licenceVersions) { + return licenceVersions.some(comparisonLicenceVersion => { + const isSameStartDate = comparisonLicenceVersion.EFF_ST_DATE === licenceVersion.EFF_ST_DATE + const isFollowingVersion = _compareLicenceVersions(licenceVersion, comparisonLicenceVersion) === 1 + return isSameStartDate && isFollowingVersion + }) +} + +function _compareLicenceVersions (licenceVersionA, licenceVersionB) { + const versionA = _getVersion(licenceVersionA) + const versionB = _getVersion(licenceVersionB) + if (versionA.issue === versionB.issue) { + if (versionA.increment === versionB.increment) { + return 0 + } + return versionA.increment > versionB.increment ? -1 : +1 + } + return versionA.issue > versionB.issue ? -1 : +1 +} + +function _getVersion (licenceVersion) { + return { + issue: parseInt(licenceVersion.ISSUE_NO), + increment: parseInt(licenceVersion.INCR_NO) + } +} + +module.exports = { + isLicenceVersionReplaced +} diff --git a/src/modules/licence-details/lib/loader.js b/src/modules/licence-details/lib/loader.js new file mode 100644 index 00000000..b7abc0fd --- /dev/null +++ b/src/modules/licence-details/lib/loader.js @@ -0,0 +1,514 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const Fetcher = require('./fetcher.js') +const Transformer = require('./transformer.js') + +async function go (naldLicence) { + const { LIC_NO: licenceRef } = naldLicence + + try { + const { + licencePriorToImport, + naldAddresses, + naldLicenceRoles, + naldLicenceVersions, + naldLicenceVersionPurposes, + naldLicenceVersionPurposeConditions, + naldParties + } = await Fetcher.go(naldLicence) + + const transformedLicenceData = Transformer.go( + naldAddresses, + naldLicence, + naldLicenceRoles, + naldLicenceVersions, + naldLicenceVersionPurposes, + naldLicenceVersionPurposeConditions, + naldParties + ) + + let results = await _persistDocument(transformedLicenceData.document) + await _persistDocumentRoles(transformedLicenceData.documentRoles, results[0].document_id) + + results = await _persistLicence(transformedLicenceData.licence) + await _flagForSupplementary(licencePriorToImport, transformedLicenceData.licence, results[0].licence_id) + + await _persistLicenceVersions(transformedLicenceData.licenceVersions, results[0].licence_id) + await _persistLicenceVersionPurposes( + transformedLicenceData.licenceVersionPurposes, + transformedLicenceData.licenceVersions + ) + await _persistLicenceVersionPurposeConditions( + transformedLicenceData.licenceVersionPurposeConditions, + transformedLicenceData.licenceVersionPurposes + ) + } catch (error) { + global.GlobalNotifier.omfg('licence-details.import errored', error, { licenceRef }) + throw error + } +} + +async function _persistLicenceVersionPurposeCondition (licenceVersionPurposeCondition, licenceVersionPurposeId) { + const params = [ + licenceVersionPurposeId, + licenceVersionPurposeCondition.code, + licenceVersionPurposeCondition.subcode, + 
licenceVersionPurposeCondition.param1, + licenceVersionPurposeCondition.param2, + licenceVersionPurposeCondition.notes, + licenceVersionPurposeCondition.externalId + ] + const query = ` + INSERT INTO water.licence_version_purpose_conditions ( + licence_version_purpose_id, + licence_version_purpose_condition_type_id, + param_1, + param_2, + notes, + external_id, + date_created, + date_updated + ) + VALUES ( + $1, + ( + SELECT + licence_version_purpose_condition_type_id + FROM + water.licence_version_purpose_condition_types + WHERE + code = $2 + AND subcode = $3 + ), + $4, + $5, + $6, + $7, + now(), + now() + ) + ON CONFLICT (external_id) + DO UPDATE + SET + licence_version_purpose_condition_type_id = excluded.licence_version_purpose_condition_type_id, + param_1 = excluded.param_1, + param_2 = excluded.param_2, + notes = excluded.notes, + date_updated = now(); + ` + + return db.query(query, params) +} + +async function _persistLicenceVersionPurposeConditions (licenceVersionPurposeConditions, licenceVersionPurposes) { + for (const licenceVersionPurposeCondition of licenceVersionPurposeConditions) { + const matchingLicenceVersionPurpose = licenceVersionPurposes.find((licenceVersionPurpose) => { + return licenceVersionPurpose.externalId === licenceVersionPurposeCondition.purposeExternalId + }) + + if (!matchingLicenceVersionPurpose) { + throw new Error('Cannot match licence version purpose condition to licence version purpose') + } + + const { licenceVersionPurposeId } = matchingLicenceVersionPurpose + await _persistLicenceVersionPurposeCondition(licenceVersionPurposeCondition, licenceVersionPurposeId) + } +} + +async function _persistLicenceVersionPurpose (licenceVersionPurpose, licenceVersionId) { + const params = [ + licenceVersionId, + licenceVersionPurpose.purposePrimary, + licenceVersionPurpose.purposeSecondary, + licenceVersionPurpose.purposeUse, + licenceVersionPurpose.abstractionPeriodStartDay, + licenceVersionPurpose.abstractionPeriodStartMonth, + licenceVersionPurpose.abstractionPeriodEndDay, + licenceVersionPurpose.abstractionPeriodEndMonth, + licenceVersionPurpose.timeLimitedStartDate, + licenceVersionPurpose.timeLimitedEndDate, + licenceVersionPurpose.notes, + licenceVersionPurpose.instantQuantity, + licenceVersionPurpose.hourlyQuantity, + licenceVersionPurpose.dailyQuantity, + licenceVersionPurpose.annualQuantity, + licenceVersionPurpose.externalId + ] + const query = ` + INSERT INTO water.licence_version_purposes ( + licence_version_id, + purpose_primary_id, + purpose_secondary_id, + purpose_use_id, + abstraction_period_start_day, + abstraction_period_start_month, + abstraction_period_end_day, + abstraction_period_end_month, + time_limited_start_date, + time_limited_end_date, + notes, + instant_quantity, + hourly_quantity, + daily_quantity, + annual_quantity, + external_id, + date_created, + date_updated + ) + VALUES ( + $1, + ( + SELECT + purpose_primary_id + FROM + water.purposes_primary + WHERE + legacy_id = $2 + ), + ( + SELECT + purpose_secondary_id + FROM + water.purposes_secondary + WHERE + legacy_id = $3 + ), + ( + SELECT + purpose_use_id + FROM + water.purposes_uses + WHERE + legacy_id = $4 + ), + $5, + $6, + $7, + $8, + $9, + $10, + $11, + $12, + $13, + $14, + $15, + $16, + now(), + now() + ) + ON CONFLICT (external_id) + DO UPDATE + SET + purpose_primary_id = excluded.purpose_primary_id, + purpose_secondary_id = excluded.purpose_secondary_id, + purpose_use_id = excluded.purpose_use_id, + abstraction_period_start_day = excluded.abstraction_period_start_day, + 
abstraction_period_start_month = excluded.abstraction_period_start_month, + abstraction_period_end_day = excluded.abstraction_period_end_day, + abstraction_period_end_month = excluded.abstraction_period_end_month, + time_limited_start_date = excluded.time_limited_start_date, + time_limited_end_date = excluded.time_limited_end_date, + notes = excluded.notes, + instant_quantity = excluded.instant_quantity, + hourly_quantity = excluded.hourly_quantity, + daily_quantity = excluded.daily_quantity, + annual_quantity = excluded.annual_quantity, + date_updated = now() + RETURNING licence_version_purpose_id; + ` + + return db.query(query, params) +} + +async function _persistLicenceVersionPurposes (licenceVersionPurposes, licenceVersions) { + for (const licenceVersionPurpose of licenceVersionPurposes) { + const matchingLicenceVersion = licenceVersions.find((licenceVersion) => { + const { issue, increment } = licenceVersion + + return licenceVersionPurpose.issue === issue && licenceVersionPurpose.increment === increment + }) + + if (!matchingLicenceVersion) { + throw new Error('Cannot match licence version purpose to licence version') + } + + const { licenceVersionId } = matchingLicenceVersion + const results = await _persistLicenceVersionPurpose(licenceVersionPurpose, licenceVersionId) + + licenceVersionPurpose.licenceVersionPurposeId = results[0].licence_version_purpose_id + } +} + +async function _persistLicenceVersion (licenceVersion, licenceId) { + const params = [ + licenceId, + licenceVersion.issue, + licenceVersion.increment, + licenceVersion.status, + licenceVersion.startDate, + licenceVersion.endDate, + licenceVersion.externalId + ] + const query = ` + INSERT INTO water.licence_versions ( + licence_id, + issue, + "increment", + status, + start_date, + end_date, + external_id, + date_created, + date_updated + ) + VALUES ( + $1, + $2, + $3, + $4, + $5, + $6, + $7, + now(), + now() + ) + ON CONFLICT (external_id) + DO UPDATE + SET + licence_id = excluded.licence_id, + status = excluded.status, + start_date = excluded.start_date, + end_date = excluded.end_date, + date_updated = now() + RETURNING licence_version_id; + ` + + return db.query(query, params) +} + +async function _persistLicenceVersions (licenceVersions, licenceId) { + for (const licenceVersion of licenceVersions) { + const results = await _persistLicenceVersion(licenceVersion, licenceId) + + licenceVersion.licenceVersionId = results[0].licence_version_id + } +} + +async function _persistLicence (licence) { + const params = [ + licence.regionCode, + licence.licenceNumber, + licence.isWaterUndertaker, + licence.regions, + licence.startDate, + licence.expiredDate, + licence.lapsedDate, + licence.revokedDate + ] + const query = ` + INSERT INTO water.licences ( + region_id, + licence_ref, + is_water_undertaker, + regions, + start_date, + expired_date, + lapsed_date, + revoked_date + ) + VALUES ( + ( + SELECT + region_id + FROM + water.regions + WHERE + nald_region_id = $1 + ), + $2, + $3, + $4, + $5, + $6, + $7, + $8 + ) + ON CONFLICT (licence_ref) + DO UPDATE + SET + is_water_undertaker = excluded.is_water_undertaker, + regions = excluded.regions, + start_date = excluded.start_date, + expired_date = excluded.expired_date, + lapsed_date = excluded.lapsed_date, + revoked_date = excluded.revoked_date, + date_updated = now() + RETURNING licence_id; + ` + + return db.query(query, params) +} + +async function _persistDocumentRoles (documentRoles, documentId) { + for (const documentRole of documentRoles) { + await 
_persistDocumentRole(documentRole, documentId) + } +} + +async function _persistDocumentRole (documentRole, documentId) { + const params = [ + documentId, + documentRole.startDate, + documentRole.endDate, + documentRole.companyExternalId, + documentRole.contactExternalId, + documentRole.addressExternalId, + documentRole.role + ] + const query = ` + INSERT INTO crm_v2.document_roles ( + document_id, + role_id, + company_id, + contact_id, + address_id, + start_date, + end_date, + date_created, + date_updated + ) + SELECT + $1, + r.role_id, + c.company_id, + co.contact_id, + a.address_id, + $2, + $3, + NOW(), + NOW() + FROM + crm_v2.roles r + LEFT JOIN crm_v2.companies c ON c.external_id = $4 + LEFT JOIN crm_v2.contacts co ON co.external_id = $5 + LEFT JOIN crm_v2.addresses a ON a.external_id = $6 + WHERE + r.name = $7 + ON CONFLICT ( + document_id, + role_id, + start_date + ) + DO UPDATE + SET + company_id = EXCLUDED.company_id, + contact_id = EXCLUDED.contact_id, + address_id = EXCLUDED.address_id, + end_date = EXCLUDED.end_date, + date_updated = EXCLUDED.date_updated; + ` + + return db.query(query, params) +} + +async function _persistDocument (document) { + const params = [document.documentRef, document.startDate, document.endDate, document.externalId] + const query = ` + INSERT INTO crm_v2.documents ( + regime, + document_type, + document_ref, + start_date, + end_date, + external_id, + date_created, + date_updated, + date_deleted + ) + VALUES ( + 'water', + 'abstraction_licence', + $1, + $2, + $3, + $4, + NOW(), + NOW(), + NULL + ) + ON CONFLICT ( + regime, + document_type, + document_ref + ) + DO UPDATE + SET + start_date = EXCLUDED.start_date, + end_date = EXCLUDED.end_date, + external_id = EXCLUDED.external_id, + date_updated = EXCLUDED.date_updated, + date_deleted = EXCLUDED.date_deleted + RETURNING document_id; + ` + + return db.query(query, params) +} + +function _flagForSupplementary (licencePriorToImport, licence, licenceId) { + const { + expired_date: priorExpiredDate, + lapsed_date: priorLapsedDate, + revoked_date: priorRevokedDate + } = licencePriorToImport + + const { expiredDate, lapsedDate, revokedDate } = licence + + const expiredNotChanged = priorExpiredDate === expiredDate + const lapsedNotChanged = priorLapsedDate === lapsedDate + const revokedNotChanged = priorRevokedDate === revokedDate + + if (expiredNotChanged && lapsedNotChanged && revokedNotChanged) { + return + } + + // Only update the appropriate scheme's flag depending on what the licence is linked to; if both flag both, just got + // charge versions for one scheme then flag only it, else has no charge versions then do not flag at all. + // This updates the query to handle new SROC billing plus fixes an old problem of licences with no charge versions + // were getting flagged (with no charge versions they can't be billed and the flag then cleared). 
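  // Illustrative aside (not part of this change): restated, when any of the end dates has changed the licence
  // is flagged for pre-SROC supplementary billing only if it has charge versions starting before 1 April 2022,
  // flagged for SROC supplementary billing only if it has charge versions starting on or after that date, and
  // left alone if it has no charge versions at all. Note the query below only ever sets the flags; it never
  // clears them. A hedged, standalone sketch of that decision (the function name and inputs are made up):

  function exampleSupplementaryFlags (chargeVersionStartDates) {
    const srocStartDate = new Date('2022-04-01')

    return {
      // pre-SROC flag: any charge version starting before 1 April 2022
      flagPreSroc: chargeVersionStartDates.some((startDate) => new Date(startDate) < srocStartDate),
      // SROC flag: any charge version starting on or after 1 April 2022
      flagSroc: chargeVersionStartDates.some((startDate) => new Date(startDate) >= srocStartDate)
    }
  }

  // exampleSupplementaryFlags([]) => { flagPreSroc: false, flagSroc: false } (no charge versions, no flags)
  // exampleSupplementaryFlags(['2021-04-01', '2023-04-01']) => { flagPreSroc: true, flagSroc: true }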
+ const query = ` + UPDATE water.licences l SET + include_in_supplementary_billing = CASE + WHEN EXISTS ( + SELECT + 1 + FROM + water.charge_versions cv + WHERE + cv.licence_id = l.licence_id + AND cv.start_date < '2022-04-01'::Date + ) THEN 'yes' + ELSE include_in_supplementary_billing + END, + include_in_sroc_supplementary_billing = CASE + WHEN EXISTS ( + SELECT + 1 + FROM + water.charge_versions cv + WHERE + cv.licence_id = l.licence_id + AND cv.start_date >= '2022-04-01'::Date + ) THEN TRUE + ELSE include_in_sroc_supplementary_billing + END + WHERE + l.licence_id = $1; + ` + + return db.query(query, [licenceId]) +} + +module.exports = { + go +} diff --git a/src/modules/licence-details/lib/transformer.js b/src/modules/licence-details/lib/transformer.js new file mode 100644 index 00000000..d3466786 --- /dev/null +++ b/src/modules/licence-details/lib/transformer.js @@ -0,0 +1,262 @@ +'use strict' + +const DateHelpers = require('../../../lib/date-helpers.js') +const { isLicenceVersionReplaced } = require('./legacy.js') + +const REGIONS = { + AN: 'Anglian', + MD: 'Midlands', + NO: 'Northumbria', + NW: 'North West', + SO: 'Southern', + SW: 'South West (incl Wessex)', + TH: 'Thames', + WL: 'Wales', + YO: 'Yorkshire' +} + +const STATUSES = { + CURR: 'current', + SUPER: 'superseded', + DRAFT: 'draft' +} + +function go ( + naldAddresses, + naldLicence, + naldLicenceRoles, + naldLicenceVersions, + naldLicenceVersionPurposes, + naldLicenceVersionPurposeConditions, + naldParties) { + const licence = _licence(naldLicence, naldLicenceVersions) + const licenceVersionPurposes = _licenceVersionPurposes(naldLicenceVersionPurposes) + const licenceVersionPurposeConditions = _licenceVersionPurposeConditions(naldLicenceVersionPurposeConditions) + + const document = _document(licence) + const licenceHolderRoles = _licenceHolderRoles(document, naldLicenceVersions, naldAddresses, naldParties) + const returnsToRoles = _returnsToRoles(naldLicenceRoles, naldAddresses, naldParties) + + const licenceVersions = _licenceVersions(naldLicenceVersions) + + return { + document, + documentRoles: [...licenceHolderRoles, ...returnsToRoles], + licence, + licenceVersionPurposeConditions, + licenceVersionPurposes, + licenceVersions + } +} + +function _matchAddressForRole (roleAddressId, roleRegionCode, naldAddresses) { + return naldAddresses.find((naldAddress) => { + return naldAddress.ID === roleAddressId && naldAddress.FGAC_REGION_CODE === roleRegionCode + }) +} + +function _matchPartyForRole (rolePartyId, roleRegionCode, naldParties) { + return naldParties.find((naldParty) => { + return naldParty.ID === rolePartyId && naldParty.FGAC_REGION_CODE === roleRegionCode + }) +} + +function _licenceVersions (naldLicenceVersions) { + return naldLicenceVersions.map((naldLicenceVersion) => { + const { FGAC_REGION_CODE, AABL_ID, EFF_END_DATE, EFF_ST_DATE, ISSUE_NO, INCR_NO, STATUS } = naldLicenceVersion + + return { + issue: +ISSUE_NO, + increment: +INCR_NO, + status: STATUSES[STATUS], + startDate: DateHelpers.mapNaldDate(EFF_ST_DATE), + endDate: DateHelpers.mapNaldDate(EFF_END_DATE), + externalId: `${FGAC_REGION_CODE}:${AABL_ID}:${ISSUE_NO}:${INCR_NO}` + } + }) +} + +function _returnsToRoles (naldLicenceRoles, naldAddresses, naldParties) { + return naldLicenceRoles.map((naldLicenceRole) => { + const { ACON_AADD_ID: addressId, ACON_APAR_ID: partyId, FGAC_REGION_CODE: regionCode } = naldLicenceRole + + const matchingAddress = _matchAddressForRole(addressId, regionCode, naldAddresses) + const addressExternalId = 
`${matchingAddress.FGAC_REGION_CODE}:${matchingAddress.ID}` + + const matchingNaldParty = _matchPartyForRole(partyId, regionCode, naldParties) + const partyExternalId = `${matchingNaldParty.FGAC_REGION_CODE}:${matchingNaldParty.ID}` + + return { + role: 'returnsTo', + startDate: DateHelpers.mapNaldDate(naldLicenceRole.EFF_ST_DATE), + endDate: DateHelpers.mapNaldDate(naldLicenceRole.EFF_END_DATE), + companyExternalId: partyExternalId, + contactExternalId: matchingNaldParty.APAR_TYPE === 'PER' ? partyExternalId : null, + addressExternalId + } + }) +} + +function _licenceHolderRoles (document, naldLicenceVersions, naldAddresses, naldParties) { + const licenceVersionsToUse = naldLicenceVersions.filter((naldLicenceVersion) => { + return !isLicenceVersionReplaced(naldLicenceVersion, naldLicenceVersions) + }) + + return licenceVersionsToUse.map((licenceVersion) => { + const { ACON_AADD_ID: addressId, ACON_APAR_ID: partyId, FGAC_REGION_CODE: regionCode } = licenceVersion + + const matchingAddress = _matchAddressForRole(addressId, regionCode, naldAddresses) + const addressExternalId = `${matchingAddress.FGAC_REGION_CODE}:${matchingAddress.ID}` + + const matchingNaldParty = _matchPartyForRole(partyId, regionCode, naldParties) + const partyExternalId = `${matchingNaldParty.FGAC_REGION_CODE}:${matchingNaldParty.ID}` + + return { + role: 'licenceHolder', + startDate: DateHelpers.getMaxDate([document.startDate, DateHelpers.mapNaldDate(licenceVersion.EFF_ST_DATE)]), + endDate: DateHelpers.getMinDate([document.endDate, DateHelpers.mapNaldDate(licenceVersion.EFF_END_DATE)]), + companyExternalId: partyExternalId, + contactExternalId: matchingNaldParty.APAR_TYPE === 'PER' ? partyExternalId : null, + addressExternalId + } + }) +} + +function _document (licence) { + const { endDate, externalId, licenceNumber: documentRef, startDate } = licence + + return { + documentRef, + startDate, + endDate, + externalId + } +} + +function _licenceVersionPurposeConditions (naldLicenceVersionPurposeConditions) { + return naldLicenceVersionPurposeConditions.map((condition) => { + return { + code: condition.ACIN_CODE, + subcode: condition.ACIN_SUBCODE, + param1: _null(condition.PARAM1), + param2: _null(condition.PARAM2), + notes: _null(condition.TEXT), + purposeExternalId: `${condition.FGAC_REGION_CODE}:${condition.AABP_ID}`, + externalId: `${condition.ID}:${condition.FGAC_REGION_CODE}:${condition.AABP_ID}` + } + }) +} + +function _licenceVersionPurposes (naldLicenceVersionPurposes) { + return naldLicenceVersionPurposes.map((naldLicenceVersionPurpose) => { + // NOTE: For those that haven't seen it before + before a variable is known as Unary plus + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Unary_plus + // + // In this context it is being used to convert the value to a number. We've left it as found because the docs state + // + // > unary plus is the fastest and preferred way of converting something into a number, because it does not perform + // > any other operations on the number. 
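    // Illustrative aside (not part of this change): the guard below matters because NALD serialises missing
    // values as the string 'null'. _null() turns that into a real null first, and the ternary then avoids
    // +null (which coerces to 0) and +'null' (which coerces to NaN). For example, +'265.0' === 265, while
    // +'null' is NaN.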
+ const instantQuantity = _null(naldLicenceVersionPurpose.INST_QTY) + const hourlyQuantity = _null(naldLicenceVersionPurpose.HOURLY_QTY) + const dailyQuantity = _null(naldLicenceVersionPurpose.DAILY_QTY) + const annualQuantity = _null(naldLicenceVersionPurpose.ANNUAL_QTY) + + return { + issue: +naldLicenceVersionPurpose.AABV_ISSUE_NO, + increment: +naldLicenceVersionPurpose.AABV_INCR_NO, + purposePrimary: naldLicenceVersionPurpose.APUR_APPR_CODE, + purposeSecondary: naldLicenceVersionPurpose.APUR_APSE_CODE, + purposeUse: naldLicenceVersionPurpose.APUR_APUS_CODE, + abstractionPeriodStartDay: +naldLicenceVersionPurpose.PERIOD_ST_DAY, + abstractionPeriodStartMonth: +naldLicenceVersionPurpose.PERIOD_ST_MONTH, + abstractionPeriodEndDay: +naldLicenceVersionPurpose.PERIOD_END_DAY, + abstractionPeriodEndMonth: +naldLicenceVersionPurpose.PERIOD_END_MONTH, + timeLimitedStartDate: DateHelpers.mapNaldDate(naldLicenceVersionPurpose.TIMELTD_ST_DATE), + timeLimitedEndDate: DateHelpers.mapNaldDate(naldLicenceVersionPurpose.TIMELTD_END_DATE), + notes: _null(naldLicenceVersionPurpose.NOTES), + instantQuantity: instantQuantity ? +instantQuantity : null, + hourlyQuantity: hourlyQuantity ? +hourlyQuantity : null, + dailyQuantity: dailyQuantity ? +dailyQuantity : null, + annualQuantity: annualQuantity ? +annualQuantity : null, + externalId: `${naldLicenceVersionPurpose.FGAC_REGION_CODE}:${naldLicenceVersionPurpose.ID}` + } + }) +} + +function _endDates (licence) { + const rawEndDates = [ + licence.EXPIRY_DATE, + licence.REV_DATE, + licence.LAPSED_DATE + ] + + const nulledEndDates = rawEndDates.map((rawEndDate) => { + return _null(rawEndDate) + }) + + const filteredEndDates = nulledEndDates.map((nulledEndDate) => { + return nulledEndDate + }) + + const mappedEndDates = filteredEndDates.map((filteredEndDate) => { + return DateHelpers.mapNaldDate(filteredEndDate) + }) + + return mappedEndDates +} + +function _startDate (naldLicence, naldLicenceVersions) { + if (naldLicence.ORIG_EFF_DATE !== 'null') { + return DateHelpers.mapNaldDate(naldLicence.ORIG_EFF_DATE) + } + + const licenceVersionStartDates = naldLicenceVersions.map((naldLicenceVersion) => { + return _null(naldLicenceVersion.EFF_ST_DATE) + }) + + const sortedLicenceVersionStartDates = licenceVersionStartDates.sort() + + // Return the earliest + return DateHelpers.mapNaldDate(sortedLicenceVersionStartDates[0]) +} + +function _licence (naldLicence, naldLicenceVersions) { + const endDates = _endDates(naldLicence) + const startDate = _startDate(naldLicence, naldLicenceVersions) + + return { + licenceNumber: naldLicence.LIC_NO, + startDate, + endDate: DateHelpers.getMinDate(endDates), + externalId: `${naldLicence.FGAC_REGION_CODE}:${naldLicence.ID}`, + isWaterUndertaker: naldLicence.AREP_EIUC_CODE.endsWith('SWC'), + regions: _regions(naldLicence), + regionCode: parseInt(naldLicence.FGAC_REGION_CODE, 10), + expiredDate: DateHelpers.mapNaldDate(naldLicence.EXPIRY_DATE), + lapsedDate: DateHelpers.mapNaldDate(naldLicence.LAPSED_DATE), + revokedDate: DateHelpers.mapNaldDate(naldLicence.REV_DATE) + } +} + +function _regions (naldLicence) { + const regionPrefix = naldLicence.AREP_EIUC_CODE.substr(0, 2) + + return { + historicalAreaCode: naldLicence.AREP_AREA_CODE, + regionalChargeArea: REGIONS[regionPrefix], + standardUnitChargeCode: naldLicence.AREP_SUC_CODE, + localEnvironmentAgencyPlanCode: naldLicence.AREP_LEAP_CODE + } +} + +function _null (value) { + if (value !== 'null') { + return value + } + + return null +} + +module.exports = { + go +} diff --git 
a/src/modules/licence-details/process-steps.js b/src/modules/licence-details/process-steps.js new file mode 100644 index 00000000..551d096b --- /dev/null +++ b/src/modules/licence-details/process-steps.js @@ -0,0 +1,32 @@ +'use strict' + +const LicenceImportStep = require('./steps/licence-import.js') +const PointsImportStep = require('./steps/points-import.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + let counts = {} + + try { + global.GlobalNotifier.omg('licence-details started') + + const startTime = currentTimeInNanoseconds() + + counts = await LicenceImportStep.go() + await PointsImportStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'licence-details complete') + } catch (error) { + global.GlobalNotifier.oops('licence-details failed') + } + + return { processComplete, counts } +} + +module.exports = { + go +} diff --git a/src/modules/licence-details/routes.js b/src/modules/licence-details/routes.js new file mode 100644 index 00000000..6310be90 --- /dev/null +++ b/src/modules/licence-details/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers') + +const routes = [ + { + method: 'post', + handler: controllers.licenceDetails, + path: '/licence-details' + } +] + +module.exports = routes diff --git a/src/modules/licence-details/steps/licence-import.js b/src/modules/licence-details/steps/licence-import.js new file mode 100644 index 00000000..94c85502 --- /dev/null +++ b/src/modules/licence-details/steps/licence-import.js @@ -0,0 +1,91 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const Loader = require('../lib/loader.js') + +const config = require('../../../../config.js') + +const PROGRESS_TICK = 1000 + +async function go () { + let count = 0 + let rejected = 0 + + try { + global.GlobalNotifier.omg('licence-details.licence-import started') + + const startTime = currentTimeInNanoseconds() + + const licences = await _licences() + + count = licences.length + + rejected = await _import(licences, count) + + calculateAndLogTimeTaken(startTime, 'licence-details.licence-import complete', { count, rejected }) + } catch (error) { + global.GlobalNotifier.omfg('licence-details.licence-import errored', error, { count, rejected }) + throw error + } + + return { count, rejected } +} + +async function _import (licences, count) { + const batchSize = config.processBatchSize + + let progress = PROGRESS_TICK + let rejected = 0 + + for (let i = 0; i < count; i += batchSize) { + if (i === progress) { + progress = progress + PROGRESS_TICK + global.GlobalNotifier.omg(`licence-details.licence-import progress (${i} of ${count})`) + } + + const licencesToProcess = licences.slice(i, i + batchSize) + + const processes = licencesToProcess.map((licenceToProcess) => { + return Loader.go(licenceToProcess) + }) + + const results = await Promise.allSettled(processes) + const rejectedResults = results.filter((result) => { + return result.status === 'rejected' + }) + + if (rejectedResults.length === batchSize) { + throw new Error('Whole batch rejected') + } + + rejected += rejectedResults.length + } + + return rejected +} + +async function _licences () { + const query = ` + SELECT + "ID", + "LIC_NO", + "FGAC_REGION_CODE", + "ORIG_EFF_DATE", + "EXPIRY_DATE", + "LAPSED_DATE", + "REV_DATE", + "AREP_EIUC_CODE", + "AREP_AREA_CODE", + 
"AREP_SUC_CODE", + "AREP_LEAP_CODE" + FROM + "import"."NALD_ABS_LICENCES"; + ` + + return db.query(query) +} + +module.exports = { + go +} diff --git a/src/modules/licence-details/steps/points-import.js b/src/modules/licence-details/steps/points-import.js new file mode 100644 index 00000000..3057462b --- /dev/null +++ b/src/modules/licence-details/steps/points-import.js @@ -0,0 +1,76 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') + +async function go () { + try { + global.GlobalNotifier.omg('licence-details.points-import started') + + const startTime = currentTimeInNanoseconds() + + await db.query(_query()) + + calculateAndLogTimeTaken(startTime, 'licence-details.points-import complete') + } catch (error) { + global.GlobalNotifier.omfg('licence-details.points-import errored', error) + throw error + } +} + +function _query () { + return ` + INSERT INTO water.licence_version_purpose_points ( + licence_version_purpose_id, + description, + ngr_1, + ngr_2, + ngr_3, + ngr_4, + external_id, + nald_point_id + ) + SELECT + lvp.licence_version_purpose_id, + np."LOCAL_NAME" AS description, + concat_ws(' ', np."NGR1_SHEET", np."NGR1_EAST", np."NGR1_NORTH") AS ngr_1, + ( + CASE np."NGR2_SHEET" + WHEN 'null' THEN NULL + ELSE concat_ws(' ', np."NGR2_SHEET", np."NGR2_EAST", np."NGR2_NORTH") + END + ) AS ngr_2, + ( + CASE np."NGR3_SHEET" + WHEN 'null' THEN NULL + ELSE concat_ws(' ', np."NGR3_SHEET", np."NGR3_EAST", np."NGR3_NORTH") + END + ) AS ngr_3, + ( + CASE np."NGR4_SHEET" + WHEN 'null' THEN NULL + ELSE concat_ws(' ', np."NGR4_SHEET", np."NGR4_EAST", np."NGR4_NORTH") + END + ) AS ngr_4, + (concat_ws(':', napp."FGAC_REGION_CODE", napp."AABP_ID", napp."AAIP_ID")) AS external_id, + napp."AAIP_ID"::integer AS nald_point_id + FROM + "import"."NALD_ABS_PURP_POINTS" napp + INNER JOIN water.licence_version_purposes lvp + ON napp."FGAC_REGION_CODE" = split_part(lvp.external_id, ':', 1) AND napp."AABP_ID" = split_part(lvp.external_id, ':', 2) + INNER JOIN import."NALD_POINTS" np + ON np."ID" = napp."AAIP_ID" AND np."FGAC_REGION_CODE" = napp."FGAC_REGION_CODE" + ON CONFLICT(external_id) + DO UPDATE + SET + description = excluded.description, + ngr_1 = excluded.ngr_1, + ngr_2 = excluded.ngr_2, + ngr_3 = excluded.ngr_3, + ngr_4 = excluded.ngr_4; + ` +} + +module.exports = { + go +} diff --git a/src/modules/licence-import/connectors/documents.js b/src/modules/licence-import/connectors/documents.js deleted file mode 100644 index d5aa47e9..00000000 --- a/src/modules/licence-import/connectors/documents.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const { pool } = require('../../../lib/connectors/db') -const queries = require('./queries/documents') - -/** - * Mark any documents in crm_v2.documents as deleted if the licence - * numbers no longer exist in import.NALD_ABS_LICENCES - */ -const deleteRemovedDocuments = () => pool.query(queries.deleteCrmV2Documents) - -module.exports = { - deleteRemovedDocuments -} diff --git a/src/modules/licence-import/connectors/import-companies.js b/src/modules/licence-import/connectors/import-companies.js deleted file mode 100644 index f57e6510..00000000 --- a/src/modules/licence-import/connectors/import-companies.js +++ /dev/null @@ -1,44 +0,0 @@ -const queries = require('./queries/import-companies') -const { pool } = require('../../../lib/connectors/db') - -/** - * Clear the water_import.import_companies table ready for a new import - * @return {Promise} - 
*/ -const clear = () => - pool.query(queries.clear) - -/** - * Initialise the water_import.import_companies table by copying a list - * of all NALD parties to the local table - * @return {Promise} - */ -const initialise = async () => { - const { rows } = await pool.query(queries.initialise) - return rows -} - -/** - * Sets the specified party as imported in the water_import.import_companies table - * @param {Number} regionCode - * @param {Number} partyId - * @return {Promise} - */ -const setImportedStatus = (regionCode, partyId) => - pool.query(queries.setImportedStatus, [regionCode, partyId]) - -/** - * Gets the number of parties which still need importing as companies - * @return {Promise} - */ -const getPendingCount = async () => { - const { rows: [{ count }] } = await pool.query(queries.getPendingCount) - return parseInt(count) -} - -module.exports = { - clear, - initialise, - setImportedStatus, - getPendingCount -} diff --git a/src/modules/licence-import/connectors/purpose-conditions-types.js b/src/modules/licence-import/connectors/purpose-conditions-types.js deleted file mode 100644 index 9a5e2e36..00000000 --- a/src/modules/licence-import/connectors/purpose-conditions-types.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const { pool } = require('../../../lib/connectors/db') -const queries = require('./queries/purpose-condition-types') - -/** - * Create purpose condition types - * If they exist update the existing records - */ -const createPurposeConditionTypes = async () => pool.query(queries.createPurposeConditionTypes) - -module.exports = { - createPurposeConditionTypes -} diff --git a/src/modules/licence-import/connectors/queries/documents.js b/src/modules/licence-import/connectors/queries/documents.js deleted file mode 100644 index 58bce876..00000000 --- a/src/modules/licence-import/connectors/queries/documents.js +++ /dev/null @@ -1,15 +0,0 @@ -const deleteCrmV2Documents = ` - update crm_v2.documents - set date_deleted = now() - where document_ref not in ( - select l."LIC_NO" - from import."NALD_ABS_LICENCES" l - ) - and date_deleted is null - and regime = 'water' - and document_type = 'abstraction_licence'; -` - -module.exports = { - deleteCrmV2Documents -} diff --git a/src/modules/licence-import/connectors/queries/import-companies.js b/src/modules/licence-import/connectors/queries/import-companies.js deleted file mode 100644 index 21806148..00000000 --- a/src/modules/licence-import/connectors/queries/import-companies.js +++ /dev/null @@ -1,17 +0,0 @@ -const clear = 'DELETE FROM water_import.import_companies;' - -const initialise = `INSERT INTO water_import.import_companies -(region_code, party_id, date_created, date_updated) -SELECT p."FGAC_REGION_CODE"::integer, p."ID"::integer, NOW(), NOW() -FROM import."NALD_PARTIES" p ON CONFLICT (region_code, party_id) DO NOTHING RETURNING *;` - -const setImportedStatus = 'UPDATE water_import.import_companies SET imported=true, date_updated=NOW() WHERE region_code=$1 AND party_id=$2;' - -const getPendingCount = 'SELECT COUNT(*) FROM water_import.import_companies WHERE imported=false;' - -module.exports = { - clear, - initialise, - setImportedStatus, - getPendingCount -} diff --git a/src/modules/licence-import/controller.js b/src/modules/licence-import/controller.js deleted file mode 100644 index ba95e1fb..00000000 --- a/src/modules/licence-import/controller.js +++ /dev/null @@ -1,65 +0,0 @@ -'use strict' - -const DeleteRemovedDocumentsJob = require('./jobs/delete-removed-documents.js') -const ImportCompanyJob = 
require('./jobs/import-company.js') -const ImportLicenceJob = require('./jobs/import-licence.js') - -const Boom = require('@hapi/boom') - -const postImport = async (request, h) => { - const message = DeleteRemovedDocumentsJob.createMessage() - - try { - await request.server.messageQueue.deleteQueue(DeleteRemovedDocumentsJob.name) - await request.server.messageQueue.publish(message) - - return h.response().code(202) - } catch (error) { - throw Boom.boomify(error) - } -} - -const postImportCompany = async (request, h) => { - const { regionCode, partyId } = request.query - const data = { - regionCode, - partyId, - jobNumber: 1, - numberOfJobs: 1 - } - const message = ImportCompanyJob.createMessage(data) - - try { - await request.server.messageQueue.deleteQueue(ImportCompanyJob.name) - await request.server.messageQueue.publish(message) - - return h.response().code(202) - } catch (error) { - throw Boom.boomify(error) - } -} - -const postImportLicence = async (request, h) => { - const { licenceNumber } = request.query - const data = { - licenceNumber, - jobNumber: 1, - numberOfJobs: 1 - } - const message = ImportLicenceJob.createMessage(data) - - try { - await request.server.messageQueue.deleteQueue(ImportLicenceJob.name) - await request.server.messageQueue.publish(message) - - return h.response().code(202) - } catch (error) { - throw Boom.boomify(error) - } -} - -module.exports = { - postImport, - postImportCompany, - postImportLicence -} diff --git a/src/modules/licence-import/extract/connectors/index.js b/src/modules/licence-import/extract/connectors/index.js deleted file mode 100644 index cdbf37fa..00000000 --- a/src/modules/licence-import/extract/connectors/index.js +++ /dev/null @@ -1,83 +0,0 @@ -'use strict' - -const { pool } = require('../../../../lib/connectors/db') -const queries = require('./queries') - -const findOne = async (query, params) => { - const { rows: [row] } = await pool.query(query, params) - return row -} - -const findMany = async (query, params) => { - const { rows } = await pool.query(query, params) - return rows -} - -const getLicence = licenceNumber => - findOne(queries.getLicence, [licenceNumber]) - -const getLicenceVersions = (regionCode, licenceId) => - findMany(queries.getLicenceVersions, [regionCode, licenceId]) - -const getAllParties = () => findMany(queries.getAllParties) - -const getAllAddresses = () => findMany(queries.getAllAddresses) - -const getChargeVersions = (regionCode, licenceId) => - findMany(queries.getChargeVersions, [regionCode, licenceId]) - -const getTwoPartTariffAgreements = (regionCode, licenceId) => - findMany(queries.getTwoPartTariffAgreements, [regionCode, licenceId]) - -const getSection130Agreements = (regionCode, licenceId) => - findMany(queries.getSection130Agreements, [regionCode, licenceId]) - -const getInvoiceAccounts = (regionCode, partyId) => - findMany(queries.getInvoiceAccounts, [regionCode, partyId]) - -const getPartyLicenceVersions = (regionCode, partyId) => - findMany(queries.getPartyLicenceVersions, [regionCode, partyId]) - -const getParties = (regionCode, partyIds = []) => - findMany(queries.getParties, [regionCode, partyIds.join(',')]) - -const getAddresses = (regionCode, addressIds = []) => - findMany(queries.getAddresses, [regionCode, addressIds.join(',')]) - -const getAllLicenceNumbers = () => - findMany(queries.getAllLicenceNumbers) - -const getParty = (regionCode, partyId) => - findOne(queries.getParty, [regionCode, partyId]) - -const getLicencePurposes = (regionCode, licenceId) => - 
findMany(queries.getLicencePurposes, [regionCode, licenceId]) - -const getPurposeConditions = (regionCode, purposeId) => - findMany(queries.getPurposeConditions, [regionCode, purposeId]) - -const getLicenceRoles = (regionCode, licenceId) => - findMany(queries.getLicenceRoles, [regionCode, licenceId]) - -const getPartyLicenceRoles = (regionCode, partyId) => - findMany(queries.getPartyLicenceRoles, [regionCode, partyId]) - -module.exports = { - getPurposeConditions, - getAddresses, - getAllAddresses, - getAllLicenceNumbers, - getAllParties, - getChargeVersions, - getInvoiceAccounts, - getLicence, - getLicencePurposes, - getLicenceVersions, - getParties, - getParty, - getPartyLicenceVersions, - getSection130Agreements, - getTwoPartTariffAgreements, - getLicenceRoles, - getPartyLicenceRoles -} diff --git a/src/modules/licence-import/extract/connectors/queries.js b/src/modules/licence-import/extract/connectors/queries.js deleted file mode 100644 index d998b94f..00000000 --- a/src/modules/licence-import/extract/connectors/queries.js +++ /dev/null @@ -1,182 +0,0 @@ -const getLicence = ` - SELECT * - FROM import."NALD_ABS_LICENCES" l - WHERE l."LIC_NO"=$1; -` - -const getLicenceVersions = ` - SELECT * - FROM import."NALD_ABS_LIC_VERSIONS" v - WHERE v."FGAC_REGION_CODE"=$1 - AND v."AABL_ID"=$2 - AND v."STATUS"<>'DRAFT'; -` - -const getLicencePurposes = ` - select purposes.* - from import."NALD_ABS_LIC_VERSIONS" versions - join import."NALD_ABS_LIC_PURPOSES" purposes - on versions."AABL_ID" = purposes."AABV_AABL_ID" - and versions."ISSUE_NO" = purposes."AABV_ISSUE_NO" - and versions."INCR_NO" = purposes."AABV_INCR_NO" - and versions."FGAC_REGION_CODE" = purposes."FGAC_REGION_CODE" - where versions."FGAC_REGION_CODE" = $1 - and versions."AABL_ID" = $2; -` - -const getPurposeConditions = ` -select conditions.* -from import."NALD_ABS_LIC_VERSIONS" versions - join import."NALD_ABS_LIC_PURPOSES" purposes - on versions."AABL_ID" = purposes."AABV_AABL_ID" - and versions."ISSUE_NO" = purposes."AABV_ISSUE_NO" - and versions."INCR_NO" = purposes."AABV_INCR_NO" - and versions."FGAC_REGION_CODE" = purposes."FGAC_REGION_CODE" - join import."NALD_LIC_CONDITIONS" conditions - on purposes."FGAC_REGION_CODE" = conditions."FGAC_REGION_CODE" - and purposes."ID" = conditions."AABP_ID" -where versions."FGAC_REGION_CODE" = $1 -and versions."AABL_ID" = $2 -` - -const getParty = `SELECT * FROM import."NALD_PARTIES" p - WHERE p."FGAC_REGION_CODE"=$1 - AND p."ID" = $2` - -const getAddress = `SELECT * FROM import."NALD_ADDRESSES" a - WHERE a."FGAC_REGION_CODE"=$1 - AND a."ID" = $2` - -const getAllAddresses = 'SELECT * FROM import."NALD_ADDRESSES"' - -const getAllParties = 'SELECT "FGAC_REGION_CODE", "ID" FROM import."NALD_PARTIES"' - -const getChargeVersions = ` - SELECT * - FROM import."NALD_CHG_VERSIONS" cv - JOIN import."NALD_IAS_INVOICE_ACCS" ia - ON cv."AIIA_IAS_CUST_REF"=ia."IAS_CUST_REF" - AND cv."FGAC_REGION_CODE"=ia."FGAC_REGION_CODE" - AND cv."AIIA_ALHA_ACC_NO"=ia."ALHA_ACC_NO" - WHERE cv."FGAC_REGION_CODE"=$1 - AND cv."AABL_ID"=$2 AND cv."STATUS"<>'DRAFT' - AND ia."IAS_XFER_DATE"<>'null' - ORDER BY cv."VERS_NO"::integer; -` - -const getTwoPartTariffAgreements = ` -SELECT - a.*, - cv."EFF_END_DATE" as charge_version_end_date, - cv."EFF_ST_DATE" as charge_version_start_date, - cv."VERS_NO" as version_number -FROM import."NALD_CHG_VERSIONS" cv -JOIN import."NALD_CHG_ELEMENTS" e ON cv."FGAC_REGION_CODE"=e."FGAC_REGION_CODE" AND cv."VERS_NO"=e."ACVR_VERS_NO" AND cv."AABL_ID"=e."ACVR_AABL_ID" -JOIN 
import."NALD_CHG_AGRMNTS" a ON e."FGAC_REGION_CODE"=a."FGAC_REGION_CODE" AND e."ID"=a."ACEL_ID" -WHERE - cv."FGAC_REGION_CODE"=$1 - -- Exclude charge versions that have been replaced. We know a CV is replaced because it will have the same start and end date - AND cv."EFF_END_DATE" <> cv."EFF_ST_DATE" - AND cv."AABL_ID"=$2 - AND a."AFSA_CODE"='S127' - AND concat_ws(':', cv."FGAC_REGION_CODE", cv."AABL_ID", cv."VERS_NO") in ( - -- Finds valid charge versions to select from. - -- Draft charge versions are omitted. - -- Where multiple charge versions begin on the same date, - -- pick the one with the greatest version number. - select concat_ws(':', - ncv."FGAC_REGION_CODE", - ncv."AABL_ID", - max(ncv."VERS_NO"::integer)::varchar - ) as id - from import."NALD_CHG_VERSIONS" ncv - where ncv."STATUS"<>'DRAFT' - group by ncv."FGAC_REGION_CODE", ncv."AABL_ID", ncv."EFF_ST_DATE" - ) -ORDER BY cv."VERS_NO"::integer; -` - -const getSection130Agreements = `SELECT * FROM import."NALD_LH_AGRMNTS" ag -JOIN ( - SELECT DISTINCT cv."FGAC_REGION_CODE", cv."AIIA_ALHA_ACC_NO" - FROM import."NALD_CHG_VERSIONS" cv - WHERE cv."FGAC_REGION_CODE"=$1 AND cv."AABL_ID"=$2 AND cv."STATUS"<>'DRAFT' -) cv ON ag."FGAC_REGION_CODE"=cv."FGAC_REGION_CODE" AND ag."ALHA_ACC_NO"=cv."AIIA_ALHA_ACC_NO" -AND ag."AFSA_CODE" IN ('S127', 'S130S', 'S130T', 'S130U', 'S130W') -` - -const getInvoiceAccounts = ` -select a."ACON_APAR_ID" AS licence_holder_party_id, p."NAME" AS licence_holder_party_name, -p2."NAME" AS invoice_account_party_name, -i.* -from import."NALD_LH_ACCS" a -join import."NALD_PARTIES" p on a."ACON_APAR_ID"=p."ID" and a."FGAC_REGION_CODE"=p."FGAC_REGION_CODE" -join import."NALD_IAS_INVOICE_ACCS" i -on - a."ACC_NO"=i."ALHA_ACC_NO" - and a."FGAC_REGION_CODE"=i."FGAC_REGION_CODE" -join import."NALD_PARTIES" p2 on i."ACON_APAR_ID"=p2."ID" AND i."FGAC_REGION_CODE"=p2."FGAC_REGION_CODE" -join ( - select distinct cv."AIIA_IAS_CUST_REF", cv."AIIA_ALHA_ACC_NO", cv."FGAC_REGION_CODE" - from import."NALD_CHG_VERSIONS" cv - where cv."STATUS"<>'DRAFT' -) cv -on - i."IAS_CUST_REF"=cv."AIIA_IAS_CUST_REF" - and i."FGAC_REGION_CODE"=cv."FGAC_REGION_CODE" - and i."ALHA_ACC_NO"=cv."AIIA_ALHA_ACC_NO" -where - i."IAS_XFER_DATE"<>'null' - and a."FGAC_REGION_CODE"=$1 - and a."ACON_APAR_ID"=$2 -` - -const getPartyLicenceVersions = `SELECT lv.*, l."REV_DATE", l."LAPSED_DATE", l."EXPIRY_DATE" FROM import."NALD_ABS_LIC_VERSIONS" lv -JOIN import."NALD_ABS_LICENCES" l ON lv."AABL_ID"=l."ID" AND lv."FGAC_REGION_CODE"=l."FGAC_REGION_CODE" -WHERE lv."FGAC_REGION_CODE"=$1 AND lv."ACON_APAR_ID"=$2 -AND lv."STATUS"<>'DRAFT'` - -const getParties = `SELECT * FROM import."NALD_PARTIES" p -WHERE p."FGAC_REGION_CODE"=$1 -AND p."ID" = any (string_to_array($2, ',')::text[])` - -const getAddresses = `SELECT * FROM import."NALD_ADDRESSES" a -WHERE a."FGAC_REGION_CODE"=$1 -AND a."ID" = any (string_to_array($2, ',')::text[])` - -const getAllLicenceNumbers = ` - SELECT l."LIC_NO" - FROM import."NALD_ABS_LICENCES" l; -` - -const getLicenceRoles = ` -select * from import."NALD_LIC_ROLES" r -where r."FGAC_REGION_CODE"=$1 and r."AABL_ID"=$2 -order by to_date(r."EFF_ST_DATE", 'DD/MM/YYYY') -` - -const getPartyLicenceRoles = ` -select * from import."NALD_LIC_ROLES" r - where r."FGAC_REGION_CODE"=$1 and r."ACON_APAR_ID"=$2 -` - -module.exports = { - getLicence, - getLicenceVersions, - getLicencePurposes, - getPurposeConditions, - getParty, - getAddress, - getAllAddresses, - getAllParties, - getChargeVersions, - getTwoPartTariffAgreements, - getSection130Agreements, - 
getInvoiceAccounts, - getPartyLicenceVersions, - getParties, - getAddresses, - getAllLicenceNumbers, - getLicenceRoles, - getPartyLicenceRoles -} diff --git a/src/modules/licence-import/extract/index.js b/src/modules/licence-import/extract/index.js deleted file mode 100644 index d433cfb1..00000000 --- a/src/modules/licence-import/extract/index.js +++ /dev/null @@ -1,84 +0,0 @@ -'use strict' - -const importConnector = require('./connectors') - -const getIds = (idProperty, ...args) => { - let ids = args.flatMap(num => num) - ids = ids.map(row => row[idProperty]) - return [...new Set(ids)] -} - -const getLicenceParties = (regionCode, versions, chargeVersions, roles) => { - const partyIds = getIds('ACON_APAR_ID', versions, chargeVersions, roles) - return importConnector.getParties(regionCode, partyIds) -} - -const getLicenceAddresses = (regionCode, versions, chargeVersions, roles) => { - const addressIds = getIds('ACON_AADD_ID', versions, chargeVersions, roles) - return importConnector.getAddresses(regionCode, addressIds) -} - -const getLicenceData = async licenceNumber => { - const licence = await importConnector.getLicence(licenceNumber) - const { ID: id, FGAC_REGION_CODE: regionCode } = licence - - const [versions, chargeVersions, tptAgreements, section130Agreements, purposes, conditions, roles] = await Promise.all([ - importConnector.getLicenceVersions(regionCode, id), - importConnector.getChargeVersions(regionCode, id), - importConnector.getTwoPartTariffAgreements(regionCode, id), - importConnector.getSection130Agreements(regionCode, id), - importConnector.getLicencePurposes(regionCode, id), - importConnector.getPurposeConditions(regionCode, id), - importConnector.getLicenceRoles(regionCode, id) - ]) - - const [parties, addresses] = await Promise.all([ - getLicenceParties(regionCode, versions, chargeVersions, roles), - getLicenceAddresses(regionCode, versions, chargeVersions, roles) - ]) - - return { - addresses, - chargeVersions, - licence, - parties, - purposes, - conditions, - section130Agreements, - tptAgreements, - versions, - roles - } -} - -const getCompanyAddresses = (regionCode, ...args) => { - let addressIds = args.flatMap(num => num) - addressIds = addressIds.map(row => row.ACON_AADD_ID) - return importConnector.getAddresses(regionCode, [...new Set(addressIds)]) -} - -const getCompanyData = async (regionCode, partyId) => { - const [party, invoiceAccounts, licenceVersions, licenceRoles] = await Promise.all([ - importConnector.getParty(regionCode, partyId), - importConnector.getInvoiceAccounts(regionCode, partyId), - importConnector.getPartyLicenceVersions(regionCode, partyId), - importConnector.getPartyLicenceRoles(regionCode, partyId) - ]) - - const addresses = await getCompanyAddresses(regionCode, licenceVersions, invoiceAccounts, licenceRoles) - - return { - party, - addresses, - invoiceAccounts, - licenceVersions, - licenceRoles - } -} - -module.exports = { - getLicenceData, - getCompanyData, - getAllParties: importConnector.getAllParties, - getAllLicenceNumbers: importConnector.getAllLicenceNumbers -} diff --git a/src/modules/licence-import/jobs/delete-removed-documents.js b/src/modules/licence-import/jobs/delete-removed-documents.js deleted file mode 100644 index 045f1241..00000000 --- a/src/modules/licence-import/jobs/delete-removed-documents.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const documentsConnector = require('../connectors/documents') -const ImportPurposeConditionTypesJob = require('./import-purpose-condition-types.js') - -const JOB_NAME = 
'licence-import.delete-removed-documents' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME, - expireIn: '1 hours' - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - return documentsConnector.deleteRemovedDocuments() - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - await messageQueue.publish(ImportPurposeConditionTypesJob.createMessage()) - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/licence-import/jobs/import-company.js b/src/modules/licence-import/jobs/import-company.js deleted file mode 100644 index 76b09c55..00000000 --- a/src/modules/licence-import/jobs/import-company.js +++ /dev/null @@ -1,96 +0,0 @@ -'use strict' - -const QueueLicencesSystemJob = require('./queue-licences-system') -const extract = require('../extract') -const importCompanies = require('../connectors/import-companies') -const load = require('../load') -const transform = require('../transform') - -const JOB_NAME = 'licence-import.import-company' - -const options = { - teamSize: 75, - teamConcurrency: 1 -} - -/** - * Data needed by the import company handler to process the job - * - * This is a convention with PGBoss. A number of the jobs/handlers implement a `createMessage()` function which returns - * a data object that will be used to queue the job. When it then gets processed the data object is passed to the - * handler. - * - * It may also contain non-default config to be used by PGBoss when adding the job, for example, the use of - * `singletonKey` in this job. - * - * @param {Object} data - information needed for the handler to complete the job - * @param {Object.string} data.regionCode - region Code from NALD_PARTIES - * @param {Object.string} data.partyId - id from NALD_PARTIES - * @param {Object.number} data.jobNumber - index position of this job from all jobs when added to the queue - * @param {Object.number} data.numberOfJobs - total number of import-company jobs queued in this session - * - * @return {Object} the message object used by the handler to process the job - */ -function createMessage (data) { - return { - name: JOB_NAME, - data, - options: { - singletonKey: `${JOB_NAME}.${data.regionCode}.${data.partyId}`, - expireIn: '1 hours' - } - } -} - -async function handler (job) { - try { - // Most 'jobs' are single operation things in the licence import process, for example, delete any removed documents - // or import the purposes types. However, there are typically 69K instances of this job queued up as part of the - // process! Previously, we logged every instance hence this was a primary offender in adding noise to the logs. We - // removed that logging but that leaves us with no way of confirming the job is running. So, instead we get - // src/modules/licence-import/jobs/queue-companies.js to include details on how many jobs are queued and when each - // one was added to the queue. We then use this information to log when the first is picked up and the last. - // - // N.B. It's not entirely accurate. If you added logging for all back in you might see the start message appear - // after a few jobs and likewise the finished message a few before the end. But it's good enough to give an - // indication that the 'jobs' did start and finish. 
- if (job.data.jobNumber === 1) { - global.GlobalNotifier.omg(`${JOB_NAME}: started`, { numberOfJobs: job.data.numberOfJobs }) - } - - const { regionCode, partyId } = job.data - - // Extract data - const data = await extract.getCompanyData(regionCode, partyId) - - // Transform to new structure - const mapped = transform.company.transformCompany(data) - - // Load to CRM database - await load.company.loadCompany(mapped) - - await importCompanies.setImportedStatus(regionCode, partyId) - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue) { - const count = await importCompanies.getPendingCount() - - if (count === 0) { - await messageQueue.deleteQueue('__state__completed__licence-import.import-company') - await messageQueue.publish(QueueLicencesSystemJob.createMessage()) - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) - } -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME, - options -} diff --git a/src/modules/licence-import/jobs/import-licence-system.js b/src/modules/licence-import/jobs/import-licence-system.js deleted file mode 100644 index 821a3fa3..00000000 --- a/src/modules/licence-import/jobs/import-licence-system.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict' - -const WaterSystemService = require('../../../lib/services/water-system-service.js') -const QueueLicencesJob = require('./queue-licences.js') - -const JOB_NAME = 'licence-import.import-licence-system' -const STATUS_NO_CONTENT = 204 - -const options = { - teamSize: 75, teamConcurrency: 1 -} - -function createMessage (data) { - return { - name: JOB_NAME, - data, - options: { - singletonKey: `${JOB_NAME}.${data.licenceNumber}` - } - } -} - -async function handler (job) { - try { - if (job.data.jobNumber === 1) { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - } - - const { licenceNumber } = job.data - - const response = await WaterSystemService.postImportLicence({ - licenceRef: licenceNumber - }) - - if (response.statusCode !== STATUS_NO_CONTENT) { - throw new Error(`Licence ${licenceNumber} failed with status code - ${response.statusCode}`) - } - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - try { - const { data } = job.data.request - - if (data.jobNumber === data.numberOfJobs) { - await messageQueue.publish(QueueLicencesJob.createMessage()) - global.GlobalNotifier.omg(`${JOB_NAME}: finished`, { numberOfJobs: job.data.request.data.numberOfJobs }) - } - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -module.exports = { - createMessage, handler, name: JOB_NAME, options, onComplete -} diff --git a/src/modules/licence-import/jobs/import-licence.js b/src/modules/licence-import/jobs/import-licence.js deleted file mode 100644 index 4ebe3458..00000000 --- a/src/modules/licence-import/jobs/import-licence.js +++ /dev/null @@ -1,92 +0,0 @@ -'use strict' - -const extract = require('../extract') -const load = require('../load') -const ImportPointsJob = require('./import-points.js') -const transform = require('../transform') - -const JOB_NAME = 'licence-import.import-licence' - -const options = { - teamSize: 75, - teamConcurrency: 1 -} - -/** - * Data needed by the import licence handler to process the job - * - * This is a convention with PGBoss. 
A number of the jobs/handlers implement a `createMessage()` function which returns - * a data object that will be used to queue the job. When it then gets processed the data object is passed to the - * handler. - * - * It may also contain non-default config to be used by PGBoss when adding the job, for example, the use of - * `singletonKey` in this job. - * - * @param {Object} data - information needed for the handler to complete the job - * @param {Object.string} data.licenceNumber - reference of the licence to import - * @param {Object.number} data.jobNumber - index position of this job from all jobs when added to the queue - * @param {Object.number} data.numberOfJobs - total number of import-licence jobs queued in this session - * - * @return {Object} the message object used by the handler to process the job - */ -function createMessage (data) { - return { - name: JOB_NAME, - data, - options: { - singletonKey: `${JOB_NAME}.${data.licenceNumber}` - } - } -} - -async function handler (job) { - try { - // Most 'jobs' are single operation things in the licence import process, for example, delete any removed documents - // or import the purposes types. However, there are typically 69K instances of this job queued up as part of the - // process! Previously, we logged every instance hence this was a primary offender in adding noise to the logs. We - // removed that logging but that leaves us with no way of confirming the job is running. So, instead we get - // src/modules/licence-import/jobs/queue-licences.js to include details on how many jobs are queued and when each - // one was added to the queue. We then use this information to log when the first is picked up and the last. - // - // N.B. It's not entirely accurate. If you added logging for all back in you might see the start message appear - // after a few jobs and likewise the finished message a few before the end. But it's good enough to give an - // indication that the 'jobs' did start and finish. 
- if (job.data.jobNumber === 1) { - global.GlobalNotifier.omg(`${JOB_NAME}: started`, { numberOfJobs: job.data.numberOfJobs }) - } - - // Extract data - const data = await extract.getLicenceData(job.data.licenceNumber) - - // Transform to new structure - const mapped = transform.licence.transformLicence(data) - - // Load licence to DB - await load.licence.loadLicence(mapped) - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - try { - const { data } = job.data.request - - if (data.jobNumber === data.numberOfJobs) { - await messageQueue.publish(ImportPointsJob.createMessage()) - global.GlobalNotifier.omg(`${JOB_NAME}: finished`, { numberOfJobs: job.data.request.data.numberOfJobs }) - } - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -module.exports = { - createMessage, - handler, - name: JOB_NAME, - onComplete, - options -} diff --git a/src/modules/licence-import/jobs/import-points.js b/src/modules/licence-import/jobs/import-points.js deleted file mode 100644 index 9406d06a..00000000 --- a/src/modules/licence-import/jobs/import-points.js +++ /dev/null @@ -1,89 +0,0 @@ -'use strict' - -const { pool } = require('../../../lib/connectors/db') - -const JOB_NAME = 'licence-import.import-points' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME, - expireIn: '1 hours' - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - return _importPoints() - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete () { - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -async function _importPoints () { - return pool.query(` - INSERT INTO water.licence_version_purpose_points ( - licence_version_purpose_id, - description, - ngr_1, - ngr_2, - ngr_3, - ngr_4, - external_id, - nald_point_id - ) - SELECT - lvp.licence_version_purpose_id, - np."LOCAL_NAME" AS description, - concat_ws(' ', np."NGR1_SHEET", np."NGR1_EAST", np."NGR1_NORTH") AS ngr_1, - ( - CASE np."NGR2_SHEET" - WHEN 'null' THEN NULL - ELSE concat_ws(' ', np."NGR2_SHEET", np."NGR2_EAST", np."NGR2_NORTH") - END - ) AS ngr_2, - ( - CASE np."NGR3_SHEET" - WHEN 'null' THEN NULL - ELSE concat_ws(' ', np."NGR3_SHEET", np."NGR3_EAST", np."NGR3_NORTH") - END - ) AS ngr_3, - ( - CASE np."NGR4_SHEET" - WHEN 'null' THEN NULL - ELSE concat_ws(' ', np."NGR4_SHEET", np."NGR4_EAST", np."NGR4_NORTH") - END - ) AS ngr_4, - (concat_ws(':', napp."FGAC_REGION_CODE", napp."AABP_ID", napp."AAIP_ID")) AS external_id, - napp."AAIP_ID"::integer AS nald_point_id - FROM - "import"."NALD_ABS_PURP_POINTS" napp - INNER JOIN water.licence_version_purposes lvp - ON napp."FGAC_REGION_CODE" = split_part(lvp.external_id, ':', 1) AND napp."AABP_ID" = split_part(lvp.external_id, ':', 2) - INNER JOIN import."NALD_POINTS" np - ON np."ID" = napp."AAIP_ID" AND np."FGAC_REGION_CODE" = napp."FGAC_REGION_CODE" - ON CONFLICT(external_id) DO - UPDATE SET - description=excluded.description, - ngr_1=excluded.ngr_1, - ngr_2=excluded.ngr_2, - ngr_3=excluded.ngr_3, - ngr_4=excluded.ngr_4; - `) -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/licence-import/jobs/import-purpose-condition-types.js b/src/modules/licence-import/jobs/import-purpose-condition-types.js deleted file mode 100644 index f19a94a0..00000000 --- 
a/src/modules/licence-import/jobs/import-purpose-condition-types.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const QueueCompaniesJob = require('./queue-companies.js') -const purposeConditionsConnector = require('../connectors/purpose-conditions-types') - -const JOB_NAME = 'licence-import.import-purpose-condition-types' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME, - expireIn: '1 hours' - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - return purposeConditionsConnector.createPurposeConditionTypes() - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - await messageQueue.publish(QueueCompaniesJob.createMessage()) - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/licence-import/jobs/queue-companies.js b/src/modules/licence-import/jobs/queue-companies.js deleted file mode 100644 index 17040b3c..00000000 --- a/src/modules/licence-import/jobs/queue-companies.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict' - -const importCompanies = require('../connectors/import-companies') -const ImportCompanyJob = require('./import-company.js') - -const JOB_NAME = 'licence-import.queue-companies' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME, - expireIn: '1 hours' - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - await importCompanies.clear() - const data = await importCompanies.initialise() - - return data.map((row) => { - return { - regionCode: parseInt(row.region_code), - partyId: parseInt(row.party_id) - } - }) - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - const { value: parties } = job.data.response - const numberOfJobs = parties.length - - for (const [index, party] of parties.entries()) { - // This information is to help us log when the import company jobs start and finish. 
See - // src/modules/licence-import/jobs/import-company.js for more details - const data = { - regionCode: party.regionCode, - partyId: party.partyId, - jobNumber: index + 1, - numberOfJobs - } - await messageQueue.publish(ImportCompanyJob.createMessage(data)) - } - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/licence-import/jobs/queue-licences-system.js b/src/modules/licence-import/jobs/queue-licences-system.js deleted file mode 100644 index cfca07fd..00000000 --- a/src/modules/licence-import/jobs/queue-licences-system.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const extract = require('../extract') -const ImportLicenceSystemJob = require('./import-licence-system.js') - -const JOB_NAME = 'licence-import.queue-licences-system' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME, expireIn: '1 hours' - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - const rows = await extract.getAllLicenceNumbers() - - return rows - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - const { value: licences } = job.data.response - const numberOfJobs = licences.length - - for (const [index, licence] of licences.entries()) { - // This information is to help us log when the import licence jobs start and finish. See - // src/modules/licence-import/jobs/import-licence.js for more details - const data = { - licenceNumber: licence.LIC_NO, - jobNumber: index + 1, - numberOfJobs - } - - await messageQueue.publish(ImportLicenceSystemJob.createMessage(data)) - } - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -module.exports = { - createMessage, handler, onComplete, name: JOB_NAME -} diff --git a/src/modules/licence-import/jobs/queue-licences.js b/src/modules/licence-import/jobs/queue-licences.js deleted file mode 100644 index 03a10638..00000000 --- a/src/modules/licence-import/jobs/queue-licences.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const extract = require('../extract') -const ImportLicenceJob = require('./import-licence.js') - -const JOB_NAME = 'licence-import.queue-licences' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME, - expireIn: '1 hours' - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - const rows = await extract.getAllLicenceNumbers() - - return rows - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - const { value: licences } = job.data.response - const numberOfJobs = licences.length - - for (const [index, licence] of licences.entries()) { - // This information is to help us log when the import licence jobs start and finish. 
See - // src/modules/licence-import/jobs/import-licence.js for more details - const data = { - licenceNumber: licence.LIC_NO, - jobNumber: index + 1, - numberOfJobs - } - await messageQueue.publish(ImportLicenceJob.createMessage(data)) - } - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/licence-import/load/company.js b/src/modules/licence-import/load/company.js deleted file mode 100644 index 229a5946..00000000 --- a/src/modules/licence-import/load/company.js +++ /dev/null @@ -1,84 +0,0 @@ -const connectors = require('./connectors') -const config = require('../../../../config') - -const getAddresses = company => { - const companyAddresses = company.addresses.map(row => row.address) - - const invoiceAccountAddresses = company.invoiceAccounts.reduce((acc, invoiceAccount) => { - const addresses = invoiceAccount.addresses.map(row => row.address) - return [...acc, ...addresses] - }, []) - - // Allow import of invoice accounts to be disabled for charging go live - const addresses = config.import.licences.isInvoiceAccountImportEnabled - ? [...companyAddresses, ...invoiceAccountAddresses] - : companyAddresses - - return [...new Set(addresses, row => row.externalId)] -} - -const loadAddresses = async company => { - const addresses = getAddresses(company) - const tasks = addresses.map(connectors.createAddress) - return Promise.all(tasks) -} - -const loadContacts = async company => { - const contacts = company.contacts.map(row => row.contact) - const tasks = contacts.map(connectors.createContact) - return Promise.all(tasks) -} - -const loadInvoiceAccount = async (company, invoiceAccount) => { - await connectors.createInvoiceAccount(company, invoiceAccount) - const tasks = invoiceAccount.addresses.map(address => - connectors.createInvoiceAccountAddress(invoiceAccount, address) - ) - return Promise.all(tasks) -} - -const loadInvoiceAccounts = async company => { - // Allow import of invoice accounts to be disabled for charging go live - if (!config.import.licences.isInvoiceAccountImportEnabled) { - return [] - } - - const tasks = company.invoiceAccounts.map(invoiceAccount => - loadInvoiceAccount(company, invoiceAccount) - ) - return Promise.all(tasks) -} - -const loadCompanyContacts = async company => { - const tasks = company.contacts.map(contact => - connectors.createCompanyContact(company, contact) - ) - return Promise.all(tasks) -} - -const loadCompanyAddresses = async company => { - const tasks = company.addresses.map(address => - connectors.createCompanyAddress(company, address) - ) - return Promise.all(tasks) -} - -const loadCompany = async company => { - const entities = await Promise.all([ - connectors.createCompany(company), - loadAddresses(company), - loadContacts(company) - ]) - - const relationships = await Promise.all([ - loadInvoiceAccounts(company), - loadCompanyContacts(company), - loadCompanyAddresses(company) - ]) - - return { entities, relationships } -} - -module.exports = { - loadCompany -} diff --git a/src/modules/licence-import/load/connectors/index.js b/src/modules/licence-import/load/connectors/index.js deleted file mode 100644 index ebc7c8e4..00000000 --- a/src/modules/licence-import/load/connectors/index.js +++ /dev/null @@ -1,168 +0,0 @@ -const { pool } = require('../../../../lib/connectors/db') -const queries = require('./queries') - -const createDocument = async doc => { - const params = [doc.documentRef, doc.startDate, doc.endDate, doc.externalId] - 
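// For illustration only (hypothetical values): a transformed document such as
//   { documentRef: '01/123', startDate: '2015-04-01', endDate: null, externalId: '1:100001' }
// is mapped to the positional params above and handed to queries.createDocument, which
// upserts into crm_v2.documents keyed on (regime, document_type, document_ref), so
// re-running the import updates the existing row rather than duplicating it.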
return pool.query(queries.createDocument, params) -} - -const createDocumentRole = async (doc, role) => { - const companyExternalId = role?.company?.externalId ?? null - const contactExternalId = role?.contact?.externalId ?? null - const adressExternalId = role?.address?.externalId ?? null - const invoiceAccountNumber = role?.invoiceAccount?.invoiceAccountNumber ?? null - - const params = [doc.documentRef, role.role, companyExternalId, - contactExternalId, adressExternalId, invoiceAccountNumber, role.startDate, role.endDate] - return pool.query(queries.createDocumentRole, params) -} - -const createCompany = company => { - const params = [company.name, company.type, company.externalId] - return pool.query(queries.createCompany, params) -} - -const createAddress = address => { - const params = [address.address1, address.address2, address.address3, address.address4, - address.town, address.county, address.postcode, address.country, address.externalId] - return pool.query(queries.createAddress, params) -} - -const createContact = contact => { - const params = [contact.salutation, contact.initials, contact.firstName, contact.lastName, contact.externalId] - return pool.query(queries.createContact, params) -} - -const createInvoiceAccount = (company, invoiceAccount) => { - const params = [invoiceAccount.invoiceAccountNumber, invoiceAccount.startDate, invoiceAccount.endDate, company.externalId] - return pool.query(queries.createInvoiceAccount, params) -} - -const createInvoiceAccountAddress = (invoiceAccount, invoiceAccountAddress) => { - const invoiceAccountAddressExternalId = invoiceAccountAddress?.agentCompany?.externalId ?? null - const params = [ - invoiceAccount.invoiceAccountNumber, - invoiceAccountAddress.address.externalId, - invoiceAccountAddress.startDate, - invoiceAccountAddress.endDate, - invoiceAccountAddressExternalId - ] - return pool.query(queries.createInvoiceAccountAddress, params) -} - -const createCompanyContact = (company, contact) => { - const params = [company.externalId, contact.contact.externalId, contact.role, contact.startDate, contact.endDate] - return pool.query(queries.createCompanyContact, params) -} - -const createCompanyAddress = (company, address) => { - const params = [company.externalId, address.address.externalId, address.role, address.startDate, address.endDate] - return pool.query(queries.createCompanyAddress, params) -} - -const createAgreement = (licence, agreement) => { - const params = [licence.licenceNumber, agreement.agreementCode, agreement.startDate, agreement.endDate] - return pool.query(queries.createAgreement, params) -} - -const createLicenceVersion = async (version, licenceId) => { - const params = [ - licenceId, - version.issue, - version.increment, - version.status, - version.startDate, - version.endDate, - version.externalId - ] - - const result = await pool.query(queries.createLicenceVersion, params) - return result.rows[0] -} - -const createLicence = async licence => { - const result = await pool.query(queries.createLicence, [ - licence.regionCode, - licence.licenceNumber, - licence.isWaterUndertaker, - licence.regions, - licence.startDate, - licence.expiredDate, - licence.lapsedDate, - licence.revokedDate - ]) - - return result.rows[0] -} - -const createLicenceVersionPurpose = async (purpose, licenceVersionId) => { - const params = [ - licenceVersionId, - purpose.purposePrimary, - purpose.purposeSecondary, - purpose.purposeUse, - purpose.abstractionPeriodStartDay, - purpose.abstractionPeriodStartMonth, - purpose.abstractionPeriodEndDay, - 
purpose.abstractionPeriodEndMonth, - purpose.timeLimitedStartDate, - purpose.timeLimitedEndDate, - purpose.notes, - purpose.instantQuantity, - purpose.hourlyQuantity, - purpose.dailyQuantity, - purpose.annualQuantity, - purpose.externalId - ] - const result = await pool.query(queries.createLicenceVersionPurpose, params) - return result.rows[0] -} - -const getLicenceByRef = async licenceRef => { - const result = await pool.query(queries.getLicenceByRef, [licenceRef]) - return result.rows[0] -} - -const flagLicenceForSupplementaryBilling = async licenceId => pool.query(queries.flagLicenceForSupplementaryBilling, [licenceId]) - -const cleanUpAgreements = licence => { - // Create keys for the agreements we wish to keep - const keys = licence.agreements.map(agreement => - `${agreement.agreementCode}:${agreement.startDate}`) - - return pool.query(queries.cleanUpAgreements, [licence.licenceNumber, keys]) -} - -const createPurposeConditionTypes = async () => pool.query(queries.createPurposeConditionTypes) - -const createPurposeCondition = (condition, purposeId) => - pool.query(queries.createPurposeCondition, [ - purposeId, - condition.code, - condition.subcode, - condition.param1, - condition.param2, - condition.notes, - condition.externalId - ]) - -module.exports = { - createPurposeCondition, - createPurposeConditionTypes, - createAddress, - createAgreement, - createCompany, - createCompanyAddress, - createCompanyContact, - createContact, - createDocument, - createDocumentRole, - createInvoiceAccount, - createInvoiceAccountAddress, - createLicence, - createLicenceVersion, - createLicenceVersionPurpose, - getLicenceByRef, - flagLicenceForSupplementaryBilling, - cleanUpAgreements -} diff --git a/src/modules/licence-import/load/connectors/queries.js b/src/modules/licence-import/load/connectors/queries.js deleted file mode 100644 index 824bd361..00000000 --- a/src/modules/licence-import/load/connectors/queries.js +++ /dev/null @@ -1,321 +0,0 @@ -const createDocument = ` - INSERT INTO crm_v2.documents (regime, document_type, document_ref, start_date, end_date, external_id, date_created, date_updated, date_deleted) - VALUES ('water', 'abstraction_licence', $1, $2, $3, $4, NOW(), NOW(), null) - ON CONFLICT (regime, document_type, document_ref) - DO UPDATE SET - start_date=EXCLUDED.start_date, - end_date=EXCLUDED.end_date, - external_id=EXCLUDED.external_id, - date_updated=EXCLUDED.date_updated, - date_deleted=EXCLUDED.date_deleted;` - -const createDocumentRole = `INSERT INTO crm_v2.document_roles (document_id, role_id, company_id, contact_id, address_id, start_date, end_date, date_created, date_updated, invoice_account_id) -SELECT d.document_id, r.role_id, c.company_id, co.contact_id, a.address_id, $7, $8, NOW(), NOW(), ia.invoice_account_id - FROM crm_v2.documents d - JOIN crm_v2.roles r ON r.name=$2 - LEFT JOIN crm_v2.companies c ON c.external_id=$3 - LEFT JOIN crm_v2.contacts co ON co.external_id=$4 - LEFT JOIN crm_v2.addresses a ON a.external_id=$5 - LEFT JOIN crm_v2.invoice_accounts ia ON ia.invoice_account_number=$6 - WHERE d.document_ref=$1 -ON CONFLICT (document_id, role_id, start_date) - DO UPDATE SET - company_id=EXCLUDED.company_id, - contact_id=EXCLUDED.contact_id, - address_id=EXCLUDED.address_id, - invoice_account_id=EXCLUDED.invoice_account_id, - end_date=EXCLUDED.end_date, - date_updated=EXCLUDED.date_updated;` - -const createCompany = `INSERT INTO crm_v2.companies (name, type, external_id, date_created, date_updated, current_hash) -VALUES ($1, $2, $3, NOW(), NOW(), 
md5(CONCAT($1::varchar, $2::varchar)::varchar)) ON CONFLICT (external_id) DO UPDATE SET name=EXCLUDED.name, -date_updated=EXCLUDED.date_updated, type=EXCLUDED.type, last_hash=EXCLUDED.current_hash, current_hash=md5(CONCAT(EXCLUDED.name::varchar,EXCLUDED.type::varchar)::varchar);` - -const createAddress = `INSERT INTO crm_v2.addresses (address_1, address_2, address_3, address_4, -town, county, postcode, country, external_id, data_source, date_created, date_updated, current_hash) -VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, 'nald', NOW(), NOW(), md5( -CONCAT( - $1::varchar, $2::varchar, $3::varchar, $4::varchar, $5::varchar, $6::varchar, $7::varchar - )::varchar -)) ON CONFLICT (external_id) DO UPDATE SET -address_1=EXCLUDED.address_1, -address_2=EXCLUDED.address_2, -address_3=EXCLUDED.address_3, -address_4=EXCLUDED.address_4, -town=EXCLUDED.town, -county=EXCLUDED.county, -postcode=EXCLUDED.postcode, -country=EXCLUDED.country, -last_hash=EXCLUDED.current_hash, -current_hash=md5(CONCAT( -EXCLUDED.address_1::varchar, -EXCLUDED.address_2::varchar, -EXCLUDED.address_3::varchar, -EXCLUDED.address_4::varchar, -EXCLUDED.town::varchar, -EXCLUDED.county::varchar, -EXCLUDED.postcode::varchar -)::varchar), -date_updated=EXCLUDED.date_updated;` - -const createContact = `INSERT INTO crm_v2.contacts (salutation, initials, first_name, last_name, external_id, data_source, date_created, date_updated, current_hash) -VALUES ($1, $2, $3, $4, $5, 'nald', NOW(), NOW(), md5(CONCAT($1::varchar,$3::varchar,$4::varchar)::varchar)) ON CONFLICT (external_id) DO UPDATE SET - salutation=EXCLUDED.salutation, - initials=EXCLUDED.initials, - first_name=EXCLUDED.first_name, - last_name=EXCLUDED.last_name, - external_id=EXCLUDED.external_id, - date_updated=EXCLUDED.date_updated, - last_hash=EXCLUDED.current_hash, - current_hash=md5(CONCAT( - EXCLUDED.salutation::varchar, - EXCLUDED.first_name::varchar, - EXCLUDED.last_name::varchar - )::varchar);` - -const createInvoiceAccount = `INSERT INTO crm_v2.invoice_accounts (company_id, invoice_account_number, start_date, end_date, date_created, date_updated) -SELECT company_id, $1, $2, $3, NOW(), NOW() FROM crm_v2.companies WHERE external_id=$4 -ON CONFLICT (invoice_account_number) DO UPDATE SET - company_id=EXCLUDED.company_id, - start_date=EXCLUDED.start_date, - end_date=EXCLUDED.end_date, - date_updated=EXCLUDED.date_updated;` - -const createInvoiceAccountAddress = `INSERT INTO crm_v2.invoice_account_addresses (invoice_account_id, address_id, agent_company_id, start_date, end_date, date_updated, date_created) -SELECT ia.invoice_account_id, a.address_id, c.company_id, $3, $4, NOW(), NOW() -FROM crm_v2.invoice_accounts ia -JOIN crm_v2.addresses a ON a.external_id=$2 -LEFT JOIN crm_v2.companies c ON c.external_id=$5 -WHERE ia.invoice_account_number=$1 ON CONFLICT (invoice_account_id, start_date) DO UPDATE SET - address_id=EXCLUDED.address_id, - date_updated=EXCLUDED.date_updated, - agent_company_id=EXCLUDED.agent_company_id;` - -const createCompanyContact = `INSERT INTO crm_v2.company_contacts (company_id, contact_id, role_id, start_date, end_date, is_default, date_created, date_updated) -SELECT c.company_id, o.contact_id, r.role_id, $4, $5, true, NOW(), NOW() -FROM crm_v2.companies c -JOIN crm_v2.contacts o ON o.external_id=$2 -JOIN crm_v2.roles r ON r.name=$3 -WHERE c.external_id=$1 ON CONFLICT (company_id, contact_id, role_id, start_date) DO UPDATE SET - contact_id=EXCLUDED.contact_id, - is_default=EXCLUDED.is_default, - end_date=EXCLUDED.end_date, - 
date_updated=EXCLUDED.date_updated;` - -const createCompanyAddress = `INSERT INTO crm_v2.company_addresses (company_id, address_id, role_id, start_date, end_date, is_default, date_created, date_updated) -SELECT c.company_id, a.address_id, r.role_id, $4, $5, true, NOW(), NOW() -FROM crm_v2.companies c -JOIN crm_v2.addresses a ON a.external_id=$2 -JOIN crm_v2.roles r ON r.name=$3 -WHERE c.external_id=$1 -ON CONFLICT (company_id, address_id, role_id) DO UPDATE SET - address_id=EXCLUDED.address_id, - is_default=EXCLUDED.is_default, - end_date=EXCLUDED.end_date, - date_updated=EXCLUDED.date_updated` - -const createAgreement = `insert into water.licence_agreements (licence_ref, financial_agreement_type_id, start_date, end_date, date_created, date_updated, source) - select $1, t.financial_agreement_type_id, $3, $4, NOW(), NOW(), 'nald' - from water.financial_agreement_types t - where t.financial_agreement_code=$2 on conflict (licence_ref, financial_agreement_type_id, start_date) WHERE date_deleted is null - do update set end_date=EXCLUDED.end_date, date_updated=EXCLUDED.date_updated, source=EXCLUDED.source;` - -const createLicence = `insert into water.licences (region_id, licence_ref, is_water_undertaker, regions, start_date, expired_date, lapsed_date, revoked_date) - values ( - (select region_id from water.regions where nald_region_id = $1), - $2, - $3, - $4, - $5, - $6, - $7, - $8 - ) on conflict (licence_ref) do update set - is_water_undertaker=excluded.is_water_undertaker, - regions=excluded.regions, - start_date=excluded.start_date, - expired_date=excluded.expired_date, - lapsed_date=excluded.lapsed_date, - revoked_date=excluded.revoked_date, - date_updated=now() - returning licence_id;` - -const createLicenceVersion = `insert into water.licence_versions ( - licence_id, - issue, - increment, - status, - start_date, - end_date, - external_id, - date_created, - date_updated - ) values ($1, $2, $3, $4, $5, $6, $7, now(), now()) on conflict (external_id) do update set - licence_id = excluded.licence_id, - status = excluded.status, - start_date = excluded.start_date, - end_date = excluded.end_date, - date_updated = now() - returning licence_version_id;` - -const createLicenceVersionPurpose = `insert into water.licence_version_purposes ( - licence_version_id, - purpose_primary_id, - purpose_secondary_id, - purpose_use_id, - abstraction_period_start_day, - abstraction_period_start_month, - abstraction_period_end_day, - abstraction_period_end_month, - time_limited_start_date, - time_limited_end_date, - notes, - instant_quantity, - hourly_quantity, - daily_quantity, - annual_quantity, - external_id, - date_created, - date_updated - ) values ( - $1, - (select purpose_primary_id from water.purposes_primary where legacy_id = $2), - (select purpose_secondary_id from water.purposes_secondary where legacy_id = $3), - (select purpose_use_id from water.purposes_uses where legacy_id = $4), - $5, - $6, - $7, - $8, - $9, - $10, - $11, - $12, - $13, - $14, - $15, - $16, - now(), - now() - ) on conflict (external_id) do update set - purpose_primary_id = excluded.purpose_primary_id, - purpose_secondary_id = excluded.purpose_secondary_id, - purpose_use_id = excluded.purpose_use_id, - abstraction_period_start_day = excluded.abstraction_period_start_day, - abstraction_period_start_month = excluded.abstraction_period_start_month, - abstraction_period_end_day = excluded.abstraction_period_end_day, - abstraction_period_end_month = excluded.abstraction_period_end_month, - time_limited_start_date = 
excluded.time_limited_start_date, - time_limited_end_date = excluded.time_limited_end_date, - notes = excluded.notes, - instant_quantity = excluded.instant_quantity, - hourly_quantity = excluded.hourly_quantity, - daily_quantity = excluded.daily_quantity, - annual_quantity = excluded.annual_quantity, - date_updated = now() - returning licence_version_purpose_id;` - -const getLicenceByRef = 'SELECT * FROM water.licences WHERE licence_ref = $1' - -// Only update the appropriate scheme's flag depending on what the licence is linked to; if both flag both, just got -// charge versions for one scheme then flag only it, else has no charge versions then do not flag at all. -// This updates the query to handle new SROC billing plus fixes an old problem of licences with no charge versions -// were getting flagged (with no charge versions they can't be billed and the flag then cleared). -// -// Also, we use the date rather than the scheme column because we have found examples of charge versions with start -// dates greater than 2022-04-01 (when SROC replaced ALCS) where the scheme is set to 'alcs'. -const flagLicenceForSupplementaryBilling = ` - UPDATE water.licences l - SET include_in_supplementary_billing = CASE - WHEN EXISTS ( - SELECT 1 - FROM water.charge_versions cv - WHERE cv.licence_id = l.licence_id - AND cv.start_date < '2022-04-01'::Date - ) THEN 'yes' - ELSE include_in_supplementary_billing - END, - include_in_sroc_supplementary_billing = CASE - WHEN EXISTS ( - SELECT 1 - FROM water.charge_versions cv - WHERE cv.licence_id = l.licence_id - AND cv.start_date >= '2022-04-01'::Date - ) THEN TRUE - ELSE include_in_sroc_supplementary_billing - END - WHERE l.licence_id = $1; -` - -const cleanUpAgreements = ` -delete - from water.licence_agreements la - using water.financial_agreement_types fat - where - la.licence_ref=$1 - and la.source='nald' - and concat_ws(':', fat.financial_agreement_code, la.start_date) <> any ($2) - and la.financial_agreement_type_id=fat.financial_agreement_type_id -` - -const createPurposeConditionTypes = ` -INSERT INTO water.licence_version_purpose_condition_types ( - code, - subcode, - description, - subcode_description - ) - SELECT "CODE", "SUBCODE", "DESCR", "SUBCODE_DESC" FROM import."NALD_LIC_COND_TYPES" - WHERE "AFFECTS_ABS" = 'Y' - ON CONFLICT (code, subcode) - DO UPDATE SET - description = excluded.description, - subcode_description = excluded.subcode_description, - date_updated = now(); -` - -const createPurposeCondition = ` -INSERT INTO water.licence_version_purpose_conditions ( - licence_version_purpose_id, - licence_version_purpose_condition_type_id, - param_1, - param_2, - notes, - external_id - ) VALUES ( - $1, - (SELECT licence_version_purpose_condition_type_id - FROM water.licence_version_purpose_condition_types - WHERE code = $2 AND subcode = $3), - $4, - $5, - $6, - $7) -ON CONFLICT (external_id) -DO UPDATE SET - licence_version_purpose_condition_type_id = excluded.licence_version_purpose_condition_type_id, - param_1 = excluded.param_1, - param_2 = excluded.param_2, - notes = excluded.notes, - date_updated = now(); -` - -module.exports = { - createDocument, - createDocumentRole, - createCompany, - createAddress, - createContact, - createInvoiceAccount, - createInvoiceAccountAddress, - createCompanyContact, - createCompanyAddress, - createAgreement, - createLicence, - createLicenceVersion, - createLicenceVersionPurpose, - getLicenceByRef, - flagLicenceForSupplementaryBilling, - cleanUpAgreements, - createPurposeConditionTypes, - createPurposeCondition 
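// A worked example for flagLicenceForSupplementaryBilling above, using illustrative
// dates: a licence with charge versions starting 2021-06-01 and 2023-04-01 would have
// include_in_supplementary_billing set to 'yes' AND include_in_sroc_supplementary_billing
// set to true; a licence whose only charge version starts on or after 2022-04-01 gets
// just the SROC flag; a licence with no charge versions at all is left unflagged.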
-} diff --git a/src/modules/licence-import/load/index.js b/src/modules/licence-import/load/index.js deleted file mode 100644 index aee8b208..00000000 --- a/src/modules/licence-import/load/index.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - company: require('./company'), - licence: require('./licence'), - purposeConditionTypes: require('./purpose-condition-types') -} diff --git a/src/modules/licence-import/load/licence.js b/src/modules/licence-import/load/licence.js deleted file mode 100644 index de41292c..00000000 --- a/src/modules/licence-import/load/licence.js +++ /dev/null @@ -1,94 +0,0 @@ -'use strict' - -const connectors = require('./connectors') -const config = require('../../../../config') -const roles = require('../transform/mappers/roles') - -const isImportableRole = role => { - if (config.import.licences.isBillingDocumentRoleImportEnabled) { - return true - } - - // Where the import of billing document roles is disabled via the - // config flag, we will import any role where it does not have - // a "billing" role - return role.role !== roles.ROLE_BILLING -} - -const loadDocumentRoles = async document => { - const tasks = document.roles - .filter(isImportableRole) - .map(role => - connectors.createDocumentRole(document, role) - ) - return Promise.all(tasks) -} - -const loadDocument = async document => { - await connectors.createDocument(document) - return loadDocumentRoles(document) -} - -const loadAgreements = async licence => { - // Allow import of licence agreements to be disabled for charging go live - if (!config.import.licences.isLicenceAgreementImportEnabled) { - return Promise.resolve([]) - } - - // Deletes any "nald" agreements not found via the current import process - await connectors.cleanUpAgreements(licence) - - const tasks = licence.agreements.map(agreement => - connectors.createAgreement(licence, agreement) - ) - return Promise.all(tasks) -} - -const loadPurposeConditions = (purposeId, purpose) => { - return Promise.all(purpose.conditions.map(condition => { - return connectors.createPurposeCondition(condition, purposeId) - })) -} - -const loadVersionPurposes = (licenceVersionId, version) => { - return Promise.all(version.purposes.map(async purpose => { - const purposeResult = await connectors.createLicenceVersionPurpose(purpose, licenceVersionId) - return loadPurposeConditions(purposeResult.licence_version_purpose_id, purpose) - })) -} - -const loadVersions = (licenceData, licenceId) => { - return Promise.all(licenceData.versions.map(async version => { - const versionResult = await connectors.createLicenceVersion(version, licenceId) - return loadVersionPurposes(versionResult.licence_version_id, version) - })) -} - -const loadLicence = async licence => { - const licencePriorToImport = await connectors.getLicenceByRef(licence.licenceNumber) - - const tasks = [ - connectors.createLicence(licence), - loadDocument(licence.document), - loadAgreements(licence) - ] - - /* - * If the expired_date, lapsed_date or revoked_date are changed, - * flag the licence for supplementary billing - */ - if (licencePriorToImport && ((licencePriorToImport.expired_date !== licence.expiredDate) || - (licencePriorToImport.lapsed_date !== licence.lapsedDate) || - (licencePriorToImport.revoked_date !== licence.revokedDate) - )) { - tasks.push(connectors.flagLicenceForSupplementaryBilling(licencePriorToImport.licence_id)) - } - - const [savedLicence] = await Promise.all(tasks) - - return loadVersions(licence, savedLicence.licence_id) -} - -module.exports = { - loadLicence -} diff --git 
a/src/modules/licence-import/load/purpose-condition-types.js b/src/modules/licence-import/load/purpose-condition-types.js deleted file mode 100644 index 4a249c5d..00000000 --- a/src/modules/licence-import/load/purpose-condition-types.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const connectors = require('./connectors') - -const createPurposeConditionTypes = () => connectors.createPurposeConditionTypes() - -module.exports = { - createPurposeConditionTypes -} diff --git a/src/modules/licence-import/plugin.js b/src/modules/licence-import/plugin.js deleted file mode 100644 index 11bf33d5..00000000 --- a/src/modules/licence-import/plugin.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict' - -const cron = require('node-cron') - -const DeleteRemovedDocumentsJob = require('./jobs/delete-removed-documents.js') -const ImportCompanyJob = require('./jobs/import-company.js') -const ImportLicenceJob = require('./jobs/import-licence.js') -const ImportLicenceSystemJob = require('./jobs/import-licence-system.js') -const ImportPointsJob = require('./jobs/import-points.js') -const ImportPurposeConditionTypesJob = require('./jobs/import-purpose-condition-types.js') -const QueueCompaniesJob = require('./jobs/queue-companies.js') -const QueueLicencesJob = require('./jobs/queue-licences.js') -const QueueLicencesSystemJob = require('./jobs/queue-licences-system.js') - -const config = require('../../../config') - -async function register (server, _options) { - // First step is to remove any documents that no longer exist in NALD - await server.messageQueue.subscribe(DeleteRemovedDocumentsJob.name, DeleteRemovedDocumentsJob.handler) - await server.messageQueue.onComplete(DeleteRemovedDocumentsJob.name, (executedJob) => { - return DeleteRemovedDocumentsJob.onComplete(server.messageQueue, executedJob) - }) - - // When the documents have been marked as deleted import a list of all companies into the - // water_import.company_import table - await server.messageQueue.subscribe(ImportPurposeConditionTypesJob.name, ImportPurposeConditionTypesJob.handler) - await server.messageQueue.onComplete(ImportPurposeConditionTypesJob.name, (executedJob) => { - return ImportPurposeConditionTypesJob.onComplete(server.messageQueue, executedJob) - }) - - // When the water_import.company_import table is ready, jobs are scheduled to import each company - await server.messageQueue.subscribe(QueueCompaniesJob.name, QueueCompaniesJob.handler) - await server.messageQueue.onComplete(QueueCompaniesJob.name, (executedJob) => { - return QueueCompaniesJob.onComplete(server.messageQueue, executedJob) - }) - - await server.messageQueue.subscribe(ImportCompanyJob.name, ImportCompanyJob.options, ImportCompanyJob.handler) - await server.messageQueue.onComplete(ImportCompanyJob.name, () => { - return ImportCompanyJob.onComplete(server.messageQueue) - }) - - await server.messageQueue.subscribe(QueueLicencesSystemJob.name, QueueLicencesSystemJob.handler) - await server.messageQueue.onComplete(QueueLicencesSystemJob.name, (executedJob) => { - return QueueLicencesSystemJob.onComplete(server.messageQueue, executedJob) - }) - - await server.messageQueue.subscribe(ImportLicenceSystemJob.name, ImportLicenceSystemJob.options, ImportLicenceSystemJob.handler) - await server.messageQueue.onComplete(ImportLicenceSystemJob.name, (executedJob) => { - return ImportLicenceSystemJob.onComplete(server.messageQueue, executedJob) - }) - - await server.messageQueue.subscribe(QueueLicencesJob.name, QueueLicencesJob.handler) - await 
server.messageQueue.onComplete(QueueLicencesJob.name, (executedJob) => { - return QueueLicencesJob.onComplete(server.messageQueue, executedJob) - }) - - await server.messageQueue.subscribe(ImportLicenceJob.name, ImportLicenceJob.options, ImportLicenceJob.handler) - await server.messageQueue.onComplete(ImportLicenceJob.name, (executedJob) => { - return ImportLicenceJob.onComplete(server.messageQueue, executedJob) - }) - - await server.messageQueue.subscribe(ImportPointsJob.name, ImportPointsJob.options, ImportPointsJob.handler) - await server.messageQueue.onComplete(ImportPointsJob.name, () => { - return ImportPointsJob.onComplete() - }) - - cron.schedule(config.import.licences.schedule, async () => { - await server.messageQueue.publish(DeleteRemovedDocumentsJob.createMessage()) - }) -} - -module.exports = { - plugin: { - name: 'importLicenceData', - dependencies: ['pgBoss'], - register - } -} diff --git a/src/modules/licence-import/routes.js b/src/modules/licence-import/routes.js deleted file mode 100644 index f22af196..00000000 --- a/src/modules/licence-import/routes.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict' - -const Joi = require('@hapi/joi') - -const controller = require('./controller') - -module.exports = [ - { - method: 'post', - handler: controller.postImport, - path: '/import/licences' - }, - { - method: 'post', - handler: controller.postImportLicence, - path: '/import/licence', - options: { - validate: { - query: Joi.object({ - licenceNumber: Joi.string().required() - }) - } - } - }, - { - method: 'post', - handler: controller.postImportCompany, - path: '/import/company', - options: { - validate: { - query: Joi.object({ - regionCode: Joi.number().integer().min(1).max(8), - partyId: Joi.number().integer().min(0) - }) - } - } - } -] diff --git a/src/modules/licence-import/transform/company.js b/src/modules/licence-import/transform/company.js deleted file mode 100644 index 55709161..00000000 --- a/src/modules/licence-import/transform/company.js +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Loads the CRM v2 data model for a given licence from NALD data in the database, - * ready to store in service tables - * @TODO - * - write data to target tables - */ -const mappers = require('./mappers') - -const mapContactData = data => ({ - parties: mappers.party.mapParties([data.party]), - addresses: mappers.address.mapAddresses(data.addresses) -}) - -const transformCompany = companyData => { - const contact = mappers.contact.mapContact(companyData.party) - const company = mappers.company.mapCompany(companyData.party) - - const context = mapContactData(companyData) - - company.invoiceAccounts = mappers.invoiceAccount.mapInvoiceAccounts(companyData.invoiceAccounts, context) - company.addresses = mappers.companyAddress.mapCompanyAddresses(companyData.licenceVersions, companyData.invoiceAccounts, companyData.licenceRoles, context) - company.contacts = mappers.companyContact.mapCompanyContacts(contact, companyData.licenceVersions, companyData.invoiceAccounts) - - return mappers.licence.omitNaldData(company) -} - -module.exports = { - transformCompany -} diff --git a/src/modules/licence-import/transform/index.js b/src/modules/licence-import/transform/index.js deleted file mode 100644 index 13bb61d3..00000000 --- a/src/modules/licence-import/transform/index.js +++ /dev/null @@ -1,4 +0,0 @@ -module.exports = { - company: require('./company'), - licence: require('./licence') -} diff --git a/src/modules/licence-import/transform/licence.js b/src/modules/licence-import/transform/licence.js deleted file mode 
100644 index 5e7a5470..00000000 --- a/src/modules/licence-import/transform/licence.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict' - -const mappers = require('./mappers') - -const mapContactData = data => ({ - parties: mappers.party.mapParties(data.parties), - addresses: mappers.address.mapAddresses(data.addresses) -}) - -/** - * Gets the CRM v2 licence model from data in the import database - * @param {String} licenceNumber - The licence to load - * @return {Promise} - the CRM v2 licence model - */ -const transformLicence = licenceData => { - // Get licence - const licence = mappers.licence.mapLicence(licenceData.licence, licenceData.versions) - const purposes = licenceData.purposes.map(mappers.licencePurpose.mapLicencePurpose) - const conditions = licenceData.conditions.map(mappers.purposeCondition.mapPurposeConditionFromNALD) - - // Get documents - licence.document = mappers.document.mapLicenceToDocument(licence) - - // Get party/address data - const context = mapContactData(licenceData) - - // Get licence holder/billing document roles - licence.document.roles = [ - ...mappers.role.mapLicenceHolderRoles(licence.document, licenceData.versions, context), - ...mappers.role.mapLicenceRoles(licenceData.roles, context) - ] - - // Agreements - section 127/130 - licence.agreements = mappers.agreement.mapAgreements(licenceData.tptAgreements, licenceData.section130Agreements) - - licence.versions = licenceData.versions.map(version => { - return mappers.licenceVersion.mapLicenceVersion(version, purposes, conditions) - }) - - const finalLicence = mappers.licence.omitNaldData(licence) - - return finalLicence -} - -module.exports = { - transformLicence -} diff --git a/src/modules/licence-import/transform/mappers/address.js b/src/modules/licence-import/transform/mappers/address.js deleted file mode 100644 index 92bc5ee0..00000000 --- a/src/modules/licence-import/transform/mappers/address.js +++ /dev/null @@ -1,29 +0,0 @@ -const str = require('./str') -const { createRegionSkeleton } = require('./region-skeleton') - -const mapAddress = address => ({ - address1: str.mapNull(address.ADDR_LINE1), - address2: str.mapNull(address.ADDR_LINE2), - address3: str.mapNull(address.ADDR_LINE3), - address4: str.mapNull(address.ADDR_LINE4), - town: str.mapNull(address.TOWN), - county: str.mapNull(address.COUNTY), - postcode: str.mapNull(address.POSTCODE), - country: str.mapNull(address.COUNTRY), - externalId: `${address.FGAC_REGION_CODE}:${address.ID}`, - _nald: address -}) - -const mapAddresses = (addresses) => { - const mappedAddresses = createRegionSkeleton() - for (const address of addresses) { - mappedAddresses[address.FGAC_REGION_CODE][address.ID] = mapAddress(address) - } - - return mappedAddresses -} - -module.exports = { - mapAddress, - mapAddresses -} diff --git a/src/modules/licence-import/transform/mappers/agreement.js b/src/modules/licence-import/transform/mappers/agreement.js deleted file mode 100644 index 18b73da0..00000000 --- a/src/modules/licence-import/transform/mappers/agreement.js +++ /dev/null @@ -1,95 +0,0 @@ -'use strict' - -const helpers = require('@envage/water-abstraction-helpers') -const date = require('./date') - -const getUniqueKey = agreement => - `${agreement.startDate}:${agreement.endDate}:${agreement.agreementCode}` - -const mapAgreement = chargeAgreement => { - // Start date is the later of the agreement start date or the - // charge version start date. 
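// A worked example of the clamping described here and in the end-date comment below,
// using illustrative NALD-format dates: an agreement effective 01/04/2018 to 'null',
// attached to a charge version running 01/04/2020 to 31/03/2022, is mapped to
// startDate '2020-04-01' and endDate '2022-03-31'.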
- const startDate = date.getMaxDate([ - date.mapNaldDate(chargeAgreement.EFF_ST_DATE), - date.mapNaldDate(chargeAgreement.charge_version_start_date) - ]) - - // End date is the earlier of the agreement end date or the - // charge version end date. Either can be null. - const endDate = date.getMinDate([ - date.mapNaldDate(chargeAgreement.EFF_END_DATE), - date.mapNaldDate(chargeAgreement.charge_version_end_date) - ]) - - return { - agreementCode: chargeAgreement.AFSA_CODE, - startDate, - endDate - } -} - -/** - * Maps element-level agreements to a single charge-level agreement - * with the max possible date range - * @param {Array} tptAgreements - * @returns {Object} - */ -const mapElementLevelAgreements = tptAgreements => { - const startDates = tptAgreements.map(row => date.mapNaldDate(row.EFF_ST_DATE)) - const endDates = tptAgreements.map(row => date.mapNaldDate(row.EFF_END_DATE)) - return { - ...tptAgreements[0], - EFF_ST_DATE: date.mapIsoDateToNald(date.getMinDate(startDates)), - EFF_END_DATE: date.mapIsoDateToNald(date.getMaxDate(endDates)) - } -} - -const mapTwoPartTariffAgreements = tptAgreements => { - const chargeVersionGroups = tptAgreements.reduce((group, item) => { - group[item.version_number] = group[item.version_number] ?? [] - group[item.version_number].push(item) - - return group - }, {}) - - const mappedGroups = {} - for (const key in chargeVersionGroups) { - mappedGroups[key] = mapElementLevelAgreements(chargeVersionGroups[key]) - } - return Object.values(mappedGroups) -} - -const mapAgreements = (tptAgreements, s130Agreements = []) => { - const mappedTptAgreements = mapTwoPartTariffAgreements(tptAgreements) - - // Map and de-duplicate identical agreements - const mapped = [...new Set([...mappedTptAgreements, ...s130Agreements].map(mapAgreement), - getUniqueKey)] - - // Group by agreement code - const groups = mapped.reduce((group, agreement) => { - group[agreement.agreementCode] = group[agreement.agreementCode] ?? [] - group[agreement.agreementCode].push(agreement) - - return group - }, {}) - - // For each group, merge history - const merged = Object.values(groups).map(group => - helpers.charging.mergeHistory( - group.sort((startDate1, startDate2) => { - if ((startDate1, startDate1.startDate) > (startDate2, startDate2.startDate)) { - return 1 - } else { - return -1 - } - }) - ) - ) - - return merged.flatMap(num => num) -} - -module.exports = { - mapAgreements -} diff --git a/src/modules/licence-import/transform/mappers/company-address.js b/src/modules/licence-import/transform/mappers/company-address.js deleted file mode 100644 index 793b79e9..00000000 --- a/src/modules/licence-import/transform/mappers/company-address.js +++ /dev/null @@ -1,109 +0,0 @@ -'use strict' - -const date = require('./date') -const roles = require('./roles') - -/** - * Gets the end date for a company address from licence version data - * @param {Object} row - from NALD licence/licence version data - * @param {String,Null} currentEnd - the current value of the end date in the accumulator - */ -const getEndDate = (row, currentEnd) => { - // Get all end dates for this row - const endDates = [row.EFF_END_DATE, row.EXPIRY_DATE, row.REV_DATE, row.LAPSED_DATE] - .map(date.mapNaldDate) - .filter(value => value) - - const arr = [date.getMinDate(endDates), currentEnd] - - return arr.includes(null) ? 
null : date.getMaxDate(arr) -} - -const getLicenceHolderAddresses = (licenceVersions, context) => { - // Sort licence versions by start date - const sorted = licenceVersions.sort((startDate1, startDate2) => { - if ((startDate1, date.mapNaldDate(startDate1.EFF_ST_DATE)) > (startDate2, date.mapNaldDate(startDate2.EFF_ST_DATE))) { - return 1 - } else { - return -1 - } - }) - - // Get the widest date range for each address - const mapped = {} - for (const row of sorted) { - const id = row.ACON_AADD_ID - const currentStart = mapped[id]?.startDate - const currentEnd = mapped[id]?.endDate - mapped[id] = { - role: roles.ROLE_LICENCE_HOLDER, - startDate: date.getMinDate([date.mapNaldDate(row.EFF_ST_DATE), currentStart]), - endDate: getEndDate(row, currentEnd), - address: context.addresses[row.FGAC_REGION_CODE][id] - } - } - return Object.values(mapped) -} - -const getBillingAddresses = (chargeVersions, context) => { - const grouped = chargeVersions.reduce((group, row) => { - group[row.ACON_AADD_ID] = group[row.ACON_AADD_ID] ?? [] - group[row.ACON_AADD_ID].push(row) - - return group - }, {}) - return Object.values(grouped).map(addressGroup => { - const { FGAC_REGION_CODE: regionCode, ACON_AADD_ID: addressId } = addressGroup[0] - const dates = addressGroup.map(row => date.mapTransferDate(row.IAS_XFER_DATE)) - return { - role: roles.ROLE_BILLING, - startDate: date.getMinDate(dates), - endDate: null, - address: context.addresses[regionCode][addressId] - } - }) -} - -const getLicenceRoleAddresses = (licenceRoles, context) => { - // Group by roles with the same address and role - let grouped = {} - if (licenceRoles) { - grouped = licenceRoles.reduce((group, item) => { - const groupingKey = `${item.FGAC_REGION_CODE}.${item.ACON_AADD_ID}.${item.ALRT_CODE}` - group[groupingKey] = group[groupingKey] ?? 
[] - group[groupingKey].push(item) - - return group - }, {}) - } - return Object.values(grouped).map(addressGroup => { - const { FGAC_REGION_CODE: regionCode, ACON_AADD_ID: addressId, ALRT_CODE: roleCode } = addressGroup[0] - const startDates = addressGroup.map(row => date.mapNaldDate(row.EFF_ST_DATE)) - const endDates = addressGroup.map(row => date.mapNaldDate(row.EFF_END_DATE)) - return { - role: roles.naldRoles.get(roleCode), - startDate: date.getMinDate(startDates), - endDate: date.getMaxDate(endDates), - address: context.addresses[regionCode][addressId] - } - }) -} - -/** - * Gets an array of the company addresses to import - * @param {Array} licenceVersions - from NALD licence/licence version data - * @param {Array} chargeVersions - from NALD charge version data - * @param {Object} context - contains company/contact/address data - * @return {Array} an array of company addresses - */ -const mapCompanyAddresses = (licenceVersions, chargeVersions, licenceRoles, context) => { - return [ - ...getLicenceHolderAddresses(licenceVersions, context), - ...getBillingAddresses(chargeVersions, context), - ...getLicenceRoleAddresses(licenceRoles, context) - ] -} - -module.exports = { - mapCompanyAddresses -} diff --git a/src/modules/licence-import/transform/mappers/company-contact.js b/src/modules/licence-import/transform/mappers/company-contact.js deleted file mode 100644 index 8ea49b52..00000000 --- a/src/modules/licence-import/transform/mappers/company-contact.js +++ /dev/null @@ -1,44 +0,0 @@ -const date = require('./date') -const roles = require('./roles') - -const getLicenceHolderContact = (contact, licenceVersions) => { - const startDates = licenceVersions.map(row => date.mapNaldDate(row.EFF_ST_DATE)) - return { - role: roles.ROLE_LICENCE_HOLDER, - startDate: date.getMinDate(startDates), - endDate: null, - contact - } -} - -const getBillingContact = (contact, chargeVersions) => { - const startDates = chargeVersions.map(row => date.mapTransferDate(row.IAS_XFER_DATE)) - return { - role: roles.ROLE_BILLING, - startDate: date.getMinDate(startDates), - endDate: null, - contact - } -} - -const mapCompanyContacts = (contact, licenceVersions, chargeVersions) => { - if (contact === null) { - return [] - } - - const contacts = [] - - if (licenceVersions.length > 0) { - contacts.push(getLicenceHolderContact(contact, licenceVersions)) - } - - if (chargeVersions.length > 0) { - contacts.push(getBillingContact(contact, chargeVersions)) - } - - return contacts -} - -module.exports = { - mapCompanyContacts -} diff --git a/src/modules/licence-import/transform/mappers/company.js b/src/modules/licence-import/transform/mappers/company.js deleted file mode 100644 index db9d156f..00000000 --- a/src/modules/licence-import/transform/mappers/company.js +++ /dev/null @@ -1,37 +0,0 @@ -const str = require('./str') - -/** - * Maps a party object to a full name for organisation/person - * @param {Object} party - * @return {String} - */ -const mapName = party => { - const firstNameKey = party.FORENAME === 'null' ? 'INITIALS' : 'FORENAME' - - const parts = [ - party.SALUTATION, - party[firstNameKey], - party.NAME - ] - - return parts - .map(str.mapNull) - .filter(value => value) - .join(' ') -} - -/** - * Maps NALD party to CRM company - * @param {Object} NALD party - * @return {Object} - */ -const mapCompany = party => ({ - type: party.APAR_TYPE === 'PER' ? 
'person' : 'organisation', - name: mapName(party), - externalId: `${party.FGAC_REGION_CODE}:${party.ID}`, - _nald: party -}) - -module.exports = { - mapCompany -} diff --git a/src/modules/licence-import/transform/mappers/contact.js b/src/modules/licence-import/transform/mappers/contact.js deleted file mode 100644 index cd1ecbcf..00000000 --- a/src/modules/licence-import/transform/mappers/contact.js +++ /dev/null @@ -1,24 +0,0 @@ -const str = require('./str') - -/** - * Maps NALD party to CRM contact - * @param {Object} NALD party - * @return {Object} - */ -const mapContact = party => { - if (party.APAR_TYPE === 'ORG') { - return null - } - return { - salutation: str.mapNull(party.SALUTATION), - initials: str.mapNull(party.INITIALS), - firstName: str.mapNull(party.FORENAME), - lastName: str.mapNull(party.NAME), - externalId: `${party.FGAC_REGION_CODE}:${party.ID}`, - _nald: party - } -} - -module.exports = { - mapContact -} diff --git a/src/modules/licence-import/transform/mappers/date.js b/src/modules/licence-import/transform/mappers/date.js deleted file mode 100644 index 27fffd4d..00000000 --- a/src/modules/licence-import/transform/mappers/date.js +++ /dev/null @@ -1,54 +0,0 @@ -const moment = require('moment') -const DATE_FORMAT = 'YYYY-MM-DD' -const NALD_FORMAT = 'DD/MM/YYYY' -const NALD_TRANSFER_FORMAT = 'DD/MM/YYYY HH:mm:ss' - -const mapNaldDate = str => { - if (str === 'null') { - return null - } - return moment(str, NALD_FORMAT).format(DATE_FORMAT) -} - -const mapIsoDateToNald = str => { - if (str === null) { - return 'null' - } - return moment(str, DATE_FORMAT).format(NALD_FORMAT) -} - -const getSortedDates = arr => { - const moments = arr - .map(str => moment(str, DATE_FORMAT)) - .filter(m => m.isValid()) - - const sorted = moments.sort(function (startDate1, startDate2) { - return startDate1 - startDate2 - }) - return sorted -} - -const getMinDate = arr => { - const sorted = getSortedDates(arr) - return sorted.length === 0 ? null : sorted[0].format(DATE_FORMAT) -} - -const getMaxDate = arr => { - const sorted = getSortedDates(arr) - return sorted.length === 0 ? 
null : sorted[sorted.length - 1].format(DATE_FORMAT) -} - -const mapTransferDate = str => - moment(str, NALD_TRANSFER_FORMAT).format(DATE_FORMAT) - -const getPreviousDay = str => - moment(str, DATE_FORMAT).subtract(1, 'day').format(DATE_FORMAT) - -module.exports = { - mapNaldDate, - getMinDate, - getMaxDate, - mapTransferDate, - getPreviousDay, - mapIsoDateToNald -} diff --git a/src/modules/licence-import/transform/mappers/document.js b/src/modules/licence-import/transform/mappers/document.js deleted file mode 100644 index afb10117..00000000 --- a/src/modules/licence-import/transform/mappers/document.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -const mapLicenceToDocument = licence => { - const { startDate, endDate, externalId } = licence - return { - documentRef: licence.licenceNumber, - startDate, - endDate, - externalId, - roles: [], - _nald: licence._nald - } -} - -module.exports = { - mapLicenceToDocument -} diff --git a/src/modules/licence-import/transform/mappers/index.js b/src/modules/licence-import/transform/mappers/index.js deleted file mode 100644 index d77b636e..00000000 --- a/src/modules/licence-import/transform/mappers/index.js +++ /dev/null @@ -1,17 +0,0 @@ -module.exports = { - address: require('./address'), - agreement: require('./agreement'), - company: require('./company'), - companyAddress: require('./company-address'), - companyContact: require('./company-contact'), - contact: require('./contact'), - date: require('./date'), - document: require('./document'), - invoiceAccount: require('./invoice-account'), - licence: require('./licence'), - licenceVersion: require('./licence-version'), - licencePurpose: require('./licence-purpose'), - party: require('./party'), - role: require('./role'), - purposeCondition: require('./purpose-condition') -} diff --git a/src/modules/licence-import/transform/mappers/invoice-account.js b/src/modules/licence-import/transform/mappers/invoice-account.js deleted file mode 100644 index 034a840f..00000000 --- a/src/modules/licence-import/transform/mappers/invoice-account.js +++ /dev/null @@ -1,82 +0,0 @@ -const helpers = require('@envage/water-abstraction-helpers') -const date = require('./date') -const roles = require('./roles') - -const mapInvoiceAccount = chargeVersion => ({ - invoiceAccountNumber: chargeVersion.IAS_CUST_REF -}) - -const getNormalisedName = str => { - return str.trim().toLowerCase().replace(/ltd\.?$/, 'limited') -} - -/** - * Gets the agent company ID for the invoice account row - * @param {Object} row - single row from invoice account query - * @return {String|Null} - agent company ID or null if no agent - */ -const getAgentCompanyExternalId = row => { - const { - licence_holder_party_id: licenceHolderPartyId, - licence_holder_party_name: licenceHolderPartyName, - invoice_account_party_name: invoiceAccountPartyName, - FGAC_REGION_CODE: regionCode, - ACON_APAR_ID: invoiceAccountPartyId - } = row - - const isDifferentId = licenceHolderPartyId !== invoiceAccountPartyId - const isDifferentName = getNormalisedName(licenceHolderPartyName) !== getNormalisedName(invoiceAccountPartyName) - - return isDifferentId && isDifferentName ? 
`${regionCode}:${invoiceAccountPartyId}` : null -} - -const mapInvoiceAccountAddresses = (iasAccounts, context) => { - // Sort group by transfer date - const sorted = iasAccounts.sort((startDate1, startDate2) => { - if ((startDate1, date.mapTransferDate(startDate1.IAS_XFER_DATE)) > (startDate2, date.mapTransferDate(startDate2.IAS_XFER_DATE))) { - return 1 - } else { - return -1 - } - }) - - // Map to new data structure - const addresses = sorted.map((row, i, arr) => ({ - role: roles.ROLE_BILLING, - startDate: date.mapTransferDate(row.IAS_XFER_DATE), - endDate: i === arr.length - 1 ? null : date.getPreviousDay(date.mapTransferDate(arr[i + 1].IAS_XFER_DATE)), - address: context.addresses[row.FGAC_REGION_CODE][row.ACON_AADD_ID], - agentCompany: { - externalId: getAgentCompanyExternalId(row) - } - })) - - // Merge on date range - return helpers.charging.mergeHistory(addresses) -} - -const mapInvoiceAccounts = (iasAccounts, context) => { - // Group by IAS customer ref (invoice account number) - const groups = iasAccounts.reduce((group, row) => { - group[row.IAS_CUST_REF] = group[row.IAS_CUST_REF] ?? [] - group[row.IAS_CUST_REF].push(row) - - return group - }, {}) - - return Object.values(groups).map(group => { - const addresses = mapInvoiceAccountAddresses(group, context) - - return { - invoiceAccountNumber: group[0].IAS_CUST_REF, - startDate: addresses[0].startDate, - endDate: null, - addresses - } - }) -} - -module.exports = { - mapInvoiceAccount, - mapInvoiceAccounts -} diff --git a/src/modules/licence-import/transform/mappers/licence-purpose.js b/src/modules/licence-import/transform/mappers/licence-purpose.js deleted file mode 100644 index d5c62967..00000000 --- a/src/modules/licence-import/transform/mappers/licence-purpose.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict' - -const dateMapper = require('./date') -const nald = require('@envage/water-abstraction-helpers').nald - -const mapLicencePurpose = data => { - const purpose = { - issue: +data.AABV_ISSUE_NO, - increment: +data.AABV_INCR_NO, - purposePrimary: data.APUR_APPR_CODE, - purposeSecondary: data.APUR_APSE_CODE, - purposeUse: data.APUR_APUS_CODE, - abstractionPeriodStartDay: +data.PERIOD_ST_DAY, - abstractionPeriodStartMonth: +data.PERIOD_ST_MONTH, - abstractionPeriodEndDay: +data.PERIOD_END_DAY, - abstractionPeriodEndMonth: +data.PERIOD_END_MONTH, - timeLimitedStartDate: dateMapper.mapNaldDate(data.TIMELTD_ST_DATE), - timeLimitedEndDate: dateMapper.mapNaldDate(data.TIMELTD_END_DATE), - notes: nald.stringNullToNull(data.NOTES), - instantQuantity: nald.stringNullToNull(data.INST_QTY) === null - ? null - : +data.INST_QTY, - hourlyQuantity: nald.stringNullToNull(data.HOURLY_QTY) === null - ? null - : +data.HOURLY_QTY, - dailyQuantity: nald.stringNullToNull(data.DAILY_QTY) === null - ? null - : +data.DAILY_QTY, - annualQuantity: nald.stringNullToNull(data.ANNUAL_QTY) === null - ? 
null - : +data.ANNUAL_QTY, - externalId: `${data.FGAC_REGION_CODE}:${data.ID}` - } - - return purpose -} - -module.exports = { - mapLicencePurpose -} diff --git a/src/modules/licence-import/transform/mappers/licence-version.js b/src/modules/licence-import/transform/mappers/licence-version.js deleted file mode 100644 index cdbcd2c8..00000000 --- a/src/modules/licence-import/transform/mappers/licence-version.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict' - -const dateMapper = require('./date') - -const statuses = { - CURR: 'current', - SUPER: 'superseded', - DRAFT: 'draft' -} - -const createExternalId = licenceVersionData => { - const { FGAC_REGION_CODE, AABL_ID, ISSUE_NO, INCR_NO } = licenceVersionData - return `${FGAC_REGION_CODE}:${AABL_ID}:${ISSUE_NO}:${INCR_NO}` -} - -/** - * Creates a mapped licence version object in a preferred format over - * the NALD format including an array of any licence purposes. - * - * @param {Object} licenceVersionData THe NALD database row for the licence version - * @param {Array} mappedPurposes An array of already mapped licence purposes - * @param {Array} mappedConditions An array of conditions - */ -const mapLicenceVersion = (licenceVersionData, mappedPurposes = [], mappedConditions = []) => { - const issue = +licenceVersionData.ISSUE_NO - const increment = +licenceVersionData.INCR_NO - return { - issue, - increment, - status: statuses[licenceVersionData.STATUS], - startDate: dateMapper.mapNaldDate(licenceVersionData.EFF_ST_DATE), - endDate: dateMapper.mapNaldDate(licenceVersionData.EFF_END_DATE), - externalId: createExternalId(licenceVersionData), - purposes: mappedPurposes.filter(p => { - return p.issue === issue && p.increment === increment - }).map(p => { - return { ...p, conditions: mappedConditions.filter(c => c.purposeExternalId === p.externalId) } - }) - } -} - -module.exports = { - mapLicenceVersion -} diff --git a/src/modules/licence-import/transform/mappers/licence.js b/src/modules/licence-import/transform/mappers/licence.js deleted file mode 100644 index c7c2c987..00000000 --- a/src/modules/licence-import/transform/mappers/licence.js +++ /dev/null @@ -1,104 +0,0 @@ -'use strict' - -const date = require('./date') -const str = require('./str') - -const regions = { - AN: 'Anglian', - MD: 'Midlands', - NO: 'Northumbria', - NW: 'North West', - SO: 'Southern', - SW: 'South West (incl Wessex)', - TH: 'Thames', - WL: 'Wales', - YO: 'Yorkshire' -} - -const getRegionData = licenceData => { - const historicalAreaCode = licenceData.AREP_AREA_CODE - const regionPrefix = licenceData.AREP_EIUC_CODE.substr(0, 2) - const regionalChargeArea = regions[regionPrefix] - const standardUnitChargeCode = licenceData.AREP_SUC_CODE - const localEnvironmentAgencyPlanCode = licenceData.AREP_LEAP_CODE - return { historicalAreaCode, regionalChargeArea, standardUnitChargeCode, localEnvironmentAgencyPlanCode } -} - -const isNotDraftLicenceVersion = licenceVersion => licenceVersion.STATUS !== 'DRAFT' - -const getLicenceVersionStartDate = licenceVersion => date.mapNaldDate(licenceVersion.EFF_ST_DATE) - -/** - * Maps the licence and licence versions to a start date. - * If the licence ORIG_EFF_DATE is not null, this is used. - * Otherwise the start date of the earliest non-draft licence - * version is used. 
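By way of illustration, and using hypothetical NALD values rather than data from a real licence, the rule described above works out as follows:
// Illustration only (hypothetical values): ORIG_EFF_DATE is the NALD string 'null',
// so the earliest start date of a non-draft licence version is used instead.
// mapStartDate(
//   { ORIG_EFF_DATE: 'null' },
//   [
//     { STATUS: 'DRAFT', EFF_ST_DATE: '01/01/1990' },
//     { STATUS: 'CURR', EFF_ST_DATE: '15/06/1995' }
//   ]
// )
// => '1995-06-15'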
- * - * @param {Object} licence - * @param {Object} licenceVersions - * @return {String} YYYY-MM-DD - */ -const mapStartDate = (licence, licenceVersions) => { - if (licence.ORIG_EFF_DATE !== 'null') { - return date.mapNaldDate(licence.ORIG_EFF_DATE) - } - - return licenceVersions - .filter(isNotDraftLicenceVersion) - .map(getLicenceVersionStartDate) - .sort() - .shift() -} - -const mapLicence = (licence, licenceVersions) => { - const endDates = [ - licence.EXPIRY_DATE, - licence.REV_DATE, - licence.LAPSED_DATE - ] - .map(str.mapNull) - .filter(value => value) - .map(date.mapNaldDate) - - return { - licenceNumber: licence.LIC_NO, - startDate: mapStartDate(licence, licenceVersions), - endDate: date.getMinDate(endDates), - documents: [], - agreements: [], - externalId: `${licence.FGAC_REGION_CODE}:${licence.ID}`, - isWaterUndertaker: licence.AREP_EIUC_CODE.endsWith('SWC'), - regions: getRegionData(licence), - regionCode: parseInt(licence.FGAC_REGION_CODE, 10), - expiredDate: date.mapNaldDate(licence.EXPIRY_DATE), - lapsedDate: date.mapNaldDate(licence.LAPSED_DATE), - revokedDate: date.mapNaldDate(licence.REV_DATE), - _nald: licence - } -} - -/** - * Deep cleans up any _nald keys in a deep object - * @param {Object} - * @return {Object} - */ -const omitNaldData = value => { - if (Array.isArray(value)) { - return value.map(omitNaldData) - } - if (typeof value === 'object' && value !== null) { - const val = { ...value } - delete val._nald - const mappedVal = {} - for (const key in val) { - mappedVal[key] = omitNaldData(val[key]) - } - return mappedVal - } - return value -} - -module.exports = { - mapLicence, - omitNaldData -} diff --git a/src/modules/licence-import/transform/mappers/party.js b/src/modules/licence-import/transform/mappers/party.js deleted file mode 100644 index 64a568c8..00000000 --- a/src/modules/licence-import/transform/mappers/party.js +++ /dev/null @@ -1,26 +0,0 @@ -const contact = require('./contact') -const company = require('./company') -const { createRegionSkeleton } = require('./region-skeleton') - -/** - * Maps parties to a hash containing companies and contacts - * @param {Array} parties - * @return {Object} - */ - -const mapParties = (parties) => { - const mappedParties = createRegionSkeleton() - for (const party of parties) { - const value = { - company: company.mapCompany(party), - contact: contact.mapContact(party) - } - mappedParties[party.FGAC_REGION_CODE][party.ID] = value - } - - return mappedParties -} - -module.exports = { - mapParties -} diff --git a/src/modules/licence-import/transform/mappers/purpose-condition.js b/src/modules/licence-import/transform/mappers/purpose-condition.js deleted file mode 100644 index a6a8617c..00000000 --- a/src/modules/licence-import/transform/mappers/purpose-condition.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -const mapPurposeConditionFromNALD = data => { - return { - code: data.ACIN_CODE, - subcode: data.ACIN_SUBCODE, - param1: data.PARAM1 === 'null' - ? null - : data.PARAM1, - param2: data.PARAM2 === 'null' - ? null - : data.PARAM2, - notes: data.TEXT === 'null' - ? 
null - : data.TEXT, - purposeExternalId: `${data.FGAC_REGION_CODE}:${data.AABP_ID}`, - externalId: `${data.ID}:${data.FGAC_REGION_CODE}:${data.AABP_ID}` - } -} - -module.exports = { - mapPurposeConditionFromNALD -} diff --git a/src/modules/licence-import/transform/mappers/region-skeleton.js b/src/modules/licence-import/transform/mappers/region-skeleton.js deleted file mode 100644 index 5bf4c27b..00000000 --- a/src/modules/licence-import/transform/mappers/region-skeleton.js +++ /dev/null @@ -1,14 +0,0 @@ -const createRegionSkeleton = () => ({ - 1: {}, - 2: {}, - 3: {}, - 4: {}, - 5: {}, - 6: {}, - 7: {}, - 8: {} -}) - -module.exports = { - createRegionSkeleton -} diff --git a/src/modules/licence-import/transform/mappers/role.js b/src/modules/licence-import/transform/mappers/role.js deleted file mode 100644 index 443fd347..00000000 --- a/src/modules/licence-import/transform/mappers/role.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict' - -const date = require('./date') -const roles = require('./roles') - -const mapLicenceHolderRoles = (document, licenceVersions, context) => licenceVersions - .filter(licenceVersion => isLicenceVersionForImport(licenceVersion, licenceVersions)) - .map(licenceVersion => mapLicenceHolderRole(document, licenceVersion, context)) - -const isLicenceVersionForImport = (licenceVersion, licenceVersions) => - !isLicenceVersionDraft(licenceVersion) && - !isLicenceVersionReplaced(licenceVersion, licenceVersions) - -const isLicenceVersionDraft = licenceVersion => licenceVersion.STATUS === 'DRAFT' - -const isLicenceVersionReplaced = (licenceVersion, licenceVersions) => - licenceVersions.some(comparisonLicenceVersion => { - const isSameStartDate = comparisonLicenceVersion.EFF_ST_DATE === licenceVersion.EFF_ST_DATE - const isFollowingVersion = compareLicenceVersions(licenceVersion, comparisonLicenceVersion) === 1 - return isSameStartDate && isFollowingVersion - }) - -const compareLicenceVersions = (licenceVersionA, licenceVersionB) => { - const versionA = getVersion(licenceVersionA) - const versionB = getVersion(licenceVersionB) - if (versionA.issue === versionB.issue) { - if (versionA.increment === versionB.increment) { - return 0 - } - return versionA.increment > versionB.increment ? -1 : +1 - } - return versionA.issue > versionB.issue ? 
-1 : +1 -} - -const getVersion = licenceVersion => ({ - issue: parseInt(licenceVersion.ISSUE_NO), - increment: parseInt(licenceVersion.INCR_NO) -}) - -/** - * Creates an initial document role for the licence holder - * excluding party / address / company - this will be added - * after the roles are merged by date range - * @param {Object} document - * @param {Object} licenceVersion - * @return {Object} document role - */ -const mapLicenceHolderRole = (document, licenceVersion, context) => ({ - role: 'licenceHolder', - startDate: date.getMaxDate([ - document.startDate, - date.mapNaldDate(licenceVersion.EFF_ST_DATE) - ]), - endDate: date.getMinDate([ - document.endDate, - date.mapNaldDate(licenceVersion.EFF_END_DATE) - ]), - ...context.parties[licenceVersion.FGAC_REGION_CODE][licenceVersion.ACON_APAR_ID], - address: context.addresses[licenceVersion.FGAC_REGION_CODE][licenceVersion.ACON_AADD_ID] -}) - -const mapLicenceRole = (row, context) => ({ - role: roles.naldRoles.get(row.ALRT_CODE), - startDate: date.mapNaldDate(row.EFF_ST_DATE), - endDate: date.mapNaldDate(row.EFF_END_DATE), - invoiceAccount: null, - contact: null, - ...context.parties[row.FGAC_REGION_CODE][row.ACON_APAR_ID], - address: context.addresses[row.FGAC_REGION_CODE][row.ACON_AADD_ID] -}) - -/** - * Whether this role should be imported - * @param {Object} role - nald licence role row data - * @return {Boolean} - */ -const isRoleForImport = role => roles.naldRoles.get(role.ALRT_CODE) === roles.ROLE_RETURNS_TO - -/** - * Maps NALD roles (returns to contact) - * @param {Object} document - * @param {Array} roles - array of roles loaded from NALD - */ -const mapLicenceRoles = (licenceRoles, context) => licenceRoles - .filter(isRoleForImport) - .map(role => mapLicenceRole(role, context)) - -module.exports = { - mapLicenceHolderRoles, - mapLicenceRoles -} diff --git a/src/modules/licence-import/transform/mappers/roles.js b/src/modules/licence-import/transform/mappers/roles.js deleted file mode 100644 index c2786510..00000000 --- a/src/modules/licence-import/transform/mappers/roles.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict' - -const ROLE_LICENCE_HOLDER = 'licenceHolder' -const ROLE_BILLING = 'billing' -const ROLE_RETURNS_TO = 'returnsTo' - -/** - * Maps the NALD role codes to the CRM v2 role name - * @type {Map} - */ -const naldRoles = new Map() -naldRoles.set('RT', ROLE_RETURNS_TO) - -module.exports = { - ROLE_LICENCE_HOLDER, - ROLE_BILLING, - ROLE_RETURNS_TO, - naldRoles -} diff --git a/src/modules/licence-import/transform/mappers/str.js b/src/modules/licence-import/transform/mappers/str.js deleted file mode 100644 index 0821c5f6..00000000 --- a/src/modules/licence-import/transform/mappers/str.js +++ /dev/null @@ -1,5 +0,0 @@ -const mapNull = str => str === 'null' ? 
null : str - -module.exports = { - mapNull -} diff --git a/src/modules/mod-logs/controller.js b/src/modules/mod-logs/controller.js deleted file mode 100644 index 1d6663c6..00000000 --- a/src/modules/mod-logs/controller.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const ImportJob = require('./jobs/import.js') - -async function importModLogs (request, h) { - await request.messageQueue.deleteQueue(ImportJob.JOB_NAME) - await request.messageQueue.publish(ImportJob.createMessage()) - - return h.response().code(204) -} - -module.exports = { - importModLogs -} diff --git a/src/modules/mod-logs/controllers.js b/src/modules/mod-logs/controllers.js new file mode 100644 index 00000000..5aac3a94 --- /dev/null +++ b/src/modules/mod-logs/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function modLogs (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + modLogs +} diff --git a/src/modules/mod-logs/jobs/import.js b/src/modules/mod-logs/jobs/import.js deleted file mode 100644 index 2107e168..00000000 --- a/src/modules/mod-logs/jobs/import.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict' - -const { pool } = require('../../../lib/connectors/db.js') -const Queries = require('../lib/queries.js') - -const JOB_NAME = 'mod-logs.import' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - await pool.query(Queries.importModLogs) - await pool.query(Queries.linkLicencesToModLogs) - await pool.query(Queries.linkChargeVersionsToModLogs) - await pool.query(Queries.linkLicenceVersionsToModLogs) - await pool.query(Queries.linkReturnVersionsToModLogs) - await pool.query(Queries.updateReturnVersionReasons) - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (job) { - if (!job.failed) { - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) - } else { - global.GlobalNotifier.omg(`${JOB_NAME}: failed`) - } -} - -module.exports = { - JOB_NAME, - createMessage, - handler, - onComplete -} diff --git a/src/modules/mod-logs/plugin.js b/src/modules/mod-logs/plugin.js deleted file mode 100644 index 1bb97ee0..00000000 --- a/src/modules/mod-logs/plugin.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict' - -const cron = require('node-cron') - -const ImportJob = require('./jobs/import.js') - -const config = require('../../../config') - -async function register (server, _options) { - // Register import mod logs job - await server.messageQueue.subscribe(ImportJob.JOB_NAME, ImportJob.handler) - await server.messageQueue.onComplete(ImportJob.JOB_NAME, (executedJob) => { - return ImportJob.onComplete(executedJob) - }) - - // Set up import on cron job - cron.schedule(config.import.modLogs.schedule, async () => { - await server.messageQueue.publish(ImportJob.createMessage()) - }) -} - -module.exports = { - plugin: { - name: 'importModLogs', - dependencies: ['pgBoss'], - register - } -} diff --git a/src/modules/mod-logs/process-steps.js b/src/modules/mod-logs/process-steps.js new file mode 100644 index 00000000..96c0dc71 --- /dev/null +++ b/src/modules/mod-logs/process-steps.js @@ -0,0 +1,29 @@ +'use strict' + +const ImportStep = require('./steps/import.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + + try 
{ + global.GlobalNotifier.omg('mod-logs started') + + const startTime = currentTimeInNanoseconds() + + await ImportStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'mod-logs complete') + } catch (error) { + global.GlobalNotifier.oops('mod-logs failed') + } + + return processComplete +} + +module.exports = { + go +} diff --git a/src/modules/mod-logs/routes.js b/src/modules/mod-logs/routes.js index 7882aeb7..9b10bff1 100644 --- a/src/modules/mod-logs/routes.js +++ b/src/modules/mod-logs/routes.js @@ -1,12 +1,12 @@ 'use strict' -const controller = require('./controller') +const controllers = require('./controllers') const routes = [ { method: 'post', - handler: controller.importModLogs, - path: '/import/mod-logs' + handler: controllers.modLogs, + path: '/mod-logs' } ] diff --git a/src/modules/mod-logs/steps/import.js b/src/modules/mod-logs/steps/import.js new file mode 100644 index 00000000..824e7168 --- /dev/null +++ b/src/modules/mod-logs/steps/import.js @@ -0,0 +1,29 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const Queries = require('../lib/queries.js') + +async function go () { + try { + global.GlobalNotifier.omg('mod-logs.import started') + + const startTime = currentTimeInNanoseconds() + + await db.query(Queries.importModLogs) + await db.query(Queries.linkLicencesToModLogs) + await db.query(Queries.linkChargeVersionsToModLogs) + await db.query(Queries.linkLicenceVersionsToModLogs) + await db.query(Queries.linkReturnVersionsToModLogs) + await db.query(Queries.updateReturnVersionReasons) + + calculateAndLogTimeTaken(startTime, 'mod-logs.import complete') + } catch (error) { + global.GlobalNotifier.omfg('mod-logs.import errored', error) + throw error + } +} + +module.exports = { + go +} diff --git a/src/modules/nald-data/controllers.js b/src/modules/nald-data/controllers.js new file mode 100644 index 00000000..6166f194 --- /dev/null +++ b/src/modules/nald-data/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function naldData (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + naldData +} diff --git a/src/modules/nald-data/process-steps.js b/src/modules/nald-data/process-steps.js new file mode 100644 index 00000000..9a730a8e --- /dev/null +++ b/src/modules/nald-data/process-steps.js @@ -0,0 +1,37 @@ +'use strict' + +const DownloadStep = require('./steps/download.js') +const ExtractStep = require('./steps/extract.js') +const ImportStep = require('./steps/import.js') +const SchemaStep = require('./steps/schema.js') +const SwapStep = require('./steps/swap.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + + try { + global.GlobalNotifier.omg('nald-data started') + + const startTime = currentTimeInNanoseconds() + + await DownloadStep.go() + await ExtractStep.go() + await SchemaStep.go() + await ImportStep.go() + await SwapStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'nald-data complete') + } catch (error) { + global.GlobalNotifier.oops('nald-data failed') + } + + return processComplete +} + +module.exports = { + go +} diff --git a/src/modules/nald-data/routes.js b/src/modules/nald-data/routes.js new file mode 100644 index 00000000..4367dabd --- /dev/null +++ b/src/modules/nald-data/routes.js @@ 
-0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers.js') + +const routes = [ + { + method: 'post', + handler: controllers.naldData, + path: '/nald-data' + } +] + +module.exports = routes diff --git a/src/modules/nald-data/steps/download.js b/src/modules/nald-data/steps/download.js new file mode 100644 index 00000000..77971cee --- /dev/null +++ b/src/modules/nald-data/steps/download.js @@ -0,0 +1,29 @@ +'use strict' + +const ProcessHelper = require('@envage/water-abstraction-helpers').process + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const S3 = require('../../../lib/services/s3') + +async function go () { + try { + global.GlobalNotifier.omg('nald-data.download started') + + const startTime = currentTimeInNanoseconds() + + await ProcessHelper.execCommand("rm -rf './temp/'") + await ProcessHelper.execCommand("mkdir -p './temp/'") + await ProcessHelper.execCommand("mkdir -p './temp/NALD'") + + await S3.download('wal_nald_data_release/nald_enc.zip', './temp/nald_enc.zip') + + calculateAndLogTimeTaken(startTime, 'nald-data.download complete') + } catch (error) { + global.GlobalNotifier.omfg('nald-data.download errored', error) + throw error + } +} + +module.exports = { + go +} diff --git a/src/modules/nald-data/steps/extract.js b/src/modules/nald-data/steps/extract.js new file mode 100644 index 00000000..a19f2392 --- /dev/null +++ b/src/modules/nald-data/steps/extract.js @@ -0,0 +1,36 @@ +'use strict' + +const ProcessHelper = require('@envage/water-abstraction-helpers').process + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') + +const config = require('../../../../config') + +async function go () { + try { + global.GlobalNotifier.omg('nald-data.extract started') + + const startTime = currentTimeInNanoseconds() + + await _extractArchive('./temp/nald_enc.zip', './temp/', config.import.nald.zipPassword) + await _extractArchive('./temp/NALD.zip', './temp/') + + calculateAndLogTimeTaken(startTime, 'nald-data.extract complete') + } catch (error) { + global.GlobalNotifier.omfg('nald-data.extract errored', error) + throw error + } +} + +async function _extractArchive (source, destination, password) { + let command = `7z x ${source} -o${destination}` + + if (password) { + command += ` -p${password}` + } + await ProcessHelper.execCommand(command) +} + +module.exports = { + go +} diff --git a/src/modules/nald-import/services/load-csv-service.js b/src/modules/nald-data/steps/import.js similarity index 50% rename from src/modules/nald-import/services/load-csv-service.js rename to src/modules/nald-data/steps/import.js index cf5a1020..2c344cbf 100644 --- a/src/modules/nald-import/services/load-csv-service.js +++ b/src/modules/nald-data/steps/import.js @@ -1,35 +1,17 @@ 'use strict' -const path = require('path') const fs = require('fs') +const ProcessHelper = require('@envage/water-abstraction-helpers').process const { promisify } = require('util') -const readFirstLine = require('firstline') - const readDir = promisify(fs.readdir) +const ReadFirstLine = require('firstline') const writeFile = promisify(fs.writeFile) -const config = require('../../../../config.js') -const { execCommand } = require('@envage/water-abstraction-helpers').process - -const constants = require('../lib/constants') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') -const finalPath = path.join(constants.LOCAL_TEMP_PATH, constants.CSV_DIRECTORY) +const config = 
require('../../../../config') -/** - * Get a list of files to import - * @return {Promise} resolves with array of files - */ -async function getImportFiles () { - const files = await readDir(finalPath) - const excludeList = ['NALD_RET_LINES_AUDIT', 'NALD_RET_FORM_LOGS_AUDIT'] - return files.filter((file) => { - const table = file.split('.')[0] - const extn = file.split('.')[1] - return !(table.length === 0 || excludeList.includes(table)) && extn === 'txt' - }) -} - -const indexableFieldsList = [ +const INDEXABLE_FIELDS_LIST = [ 'ID', 'LIC_NO', 'FGAC_REGION_CODE', @@ -66,29 +48,26 @@ const indexableFieldsList = [ 'ARFL_DATE_FROM' ] -const intersection = (arr, ...args) => { - return arr.filter(item => args.every(arr => arr.includes(item))) -} +async function go () { + try { + global.GlobalNotifier.omg('nald-data.import started') -/** - * Gets SQL for indexes to add to the supplied table - * @param {String} schemaName - * @param {String} table - * @param {Array} cols - * @return {String} - */ -const getIndexes = (schemaName, table, cols) => { - const indexableFields = intersection(indexableFieldsList, cols) - if (table === 'NALD_RET_LINES') { - // NALD_RET_LINES is large so more care is required when creating indexes which can take a long time to create - return `\nCREATE INDEX idx_nald_ret_lines_id_and_region ON ${schemaName}."NALD_RET_LINES" ("ARFL_ARTY_ID", "FGAC_REGION_CODE");` - } else { - let str = '' - for (const field of indexableFields) { - const indexName = `${table}_${field}_index` - str += `\nCREATE INDEX "${indexName}" ON ${schemaName}."${table}" ("${field}");` + const startTime = currentTimeInNanoseconds() + + const files = await _importFiles() + const sqlPath = './temp/NALD/sql.sql' + + for (const file of files) { + const sql = await _generateCreateTableSqlFromFile(file, 'import_temp') + + await writeFile(sqlPath, sql) + await ProcessHelper.execCommand(`psql ${config.pg.connectionString} < ${sqlPath}`) } - return str + + calculateAndLogTimeTaken(startTime, 'nald-data.import complete') + } catch (error) { + global.GlobalNotifier.omfg('nald-data.import errored', error) + throw error } } @@ -97,13 +76,13 @@ const getIndexes = (schemaName, table, cols) => { * @param {String} file - the CSV file to import * @return {String} the SQL statements to import the CSV file */ -async function getSqlForFile (file, schemaName) { +async function _generateCreateTableSqlFromFile (file, schemaName) { const table = file.split('.')[0] - const tablePath = path.join(finalPath, `${table}.txt`) - const line = await readFirstLine(tablePath) + const tablePath = `./temp/NALD/${table}.txt` + const line = await ReadFirstLine(tablePath) const cols = line.split(',') - let tableCreate = `\n CREATE TABLE if not exists ${schemaName}."${table}" (` + let tableCreate = `\n CREATE TABLE IF NOT EXISTS import_temp."${table}" (` for (let col = 0; col < cols.length; col++) { tableCreate += `"${cols[col]}" varchar` @@ -114,29 +93,58 @@ async function getSqlForFile (file, schemaName) { } } - tableCreate += `\n \\copy ${schemaName}."${table}" FROM '${finalPath}/${file}' HEADER DELIMITER ',' CSV;` - tableCreate += getIndexes(schemaName, table, cols) + tableCreate += `\n \\copy import_temp."${table}" FROM './temp/NALD/${file}' HEADER DELIMITER ',' CSV;` + tableCreate += _generateCreateIndexesSql(schemaName, table, cols) return tableCreate -}; +} /** - * Imports a single CSV file - * @param {String} the CSV filename - * @return {Promise} + * Gets SQL for indexes to add to the supplied table + * @param {String} schemaName + * 
@param {String} table + * @param {Array} cols + * @return {String} */ -async function importFiles (schemaName) { - const files = await getImportFiles() - const sqlPath = path.join(finalPath, 'sql.sql') +function _generateCreateIndexesSql (schemaName, table, cols) { + const indexableFields = _intersection(INDEXABLE_FIELDS_LIST, cols) - for (const file of files) { - const sql = await getSqlForFile(file, schemaName) + if (table === 'NALD_RET_LINES') { + // NALD_RET_LINES is large so more care is required when creating indexes which can take a long time to create + return `\nCREATE INDEX idx_nald_ret_lines_id_and_region ON ${schemaName}."NALD_RET_LINES" ("ARFL_ARTY_ID", "FGAC_REGION_CODE");` + } else { + let str = '' - await writeFile(sqlPath, sql) - await execCommand(`psql ${config.pg.connectionString} < ${sqlPath}`) + for (const field of indexableFields) { + const indexName = `${table}_${field}_index` + str += `\nCREATE INDEX "${indexName}" ON ${schemaName}."${table}" ("${field}");` + } + return str } } +function _intersection (arr, ...args) { + return arr.filter(item => args.every(arr => arr.includes(item))) +} + +/** + * Get a list of files to import + * @return {Promise} resolves with array of files + */ +async function _importFiles () { + const files = await readDir('./temp/NALD') + const excludeList = ['NALD_RET_LINES_AUDIT', 'NALD_RET_FORM_LOGS_AUDIT'] + + const filteredFiles = files.filter((file) => { + const table = file.split('.')[0] + const extension = file.split('.')[1] + + return !(table.length === 0 || excludeList.includes(table)) && extension === 'txt' + }) + + return filteredFiles +} + module.exports = { - importFiles + go } diff --git a/src/modules/nald-data/steps/schema.js b/src/modules/nald-data/steps/schema.js new file mode 100644 index 00000000..1492e166 --- /dev/null +++ b/src/modules/nald-data/steps/schema.js @@ -0,0 +1,24 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') + +async function go () { + try { + global.GlobalNotifier.omg('nald-data.schema started') + + const startTime = currentTimeInNanoseconds() + + await db.query('DROP SCHEMA IF EXISTS import_temp CASCADE;') + await db.query('CREATE SCHEMA IF NOT EXISTS import_temp;') + + calculateAndLogTimeTaken(startTime, 'nald-data.schema complete') + } catch (error) { + global.GlobalNotifier.omfg('nald-data.schema errored', error) + throw error + } +} + +module.exports = { + go +} diff --git a/src/modules/nald-data/steps/swap.js b/src/modules/nald-data/steps/swap.js new file mode 100644 index 00000000..64b61124 --- /dev/null +++ b/src/modules/nald-data/steps/swap.js @@ -0,0 +1,27 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const ProcessHelper = require('@envage/water-abstraction-helpers').process + +async function go () { + try { + global.GlobalNotifier.omg('nald-data.swap started') + + const startTime = currentTimeInNanoseconds() + + await db.query('DROP SCHEMA IF EXISTS import CASCADE;') + await db.query('ALTER SCHEMA import_temp RENAME TO import;') + + await ProcessHelper.execCommand("rm -rf './temp/'") + + calculateAndLogTimeTaken(startTime, 'nald-data.swap complete') + } catch (error) { + global.GlobalNotifier.omfg('nald-data.swap errored', error) + throw error + } +} + +module.exports = { + go +} diff --git a/src/modules/nald-import/controller.js 
b/src/modules/nald-import/controller.js deleted file mode 100644 index 881bc00f..00000000 --- a/src/modules/nald-import/controller.js +++ /dev/null @@ -1,139 +0,0 @@ -'use strict' - -const Boom = require('@hapi/boom') - -const { buildReturnsPacket } = require('./transform-returns') -const { getLicenceJson } = require('./transform-permit') -const importLicenceJob = require('./jobs/import-licence.js') -const s3DownloadJob = require('./jobs/s3-download.js') - -const { getFormats, getLogs, getLogLines } = require('./lib/nald-queries/returns') - -/** - * For test purposes, builds licence from the data in the NALD import - * tables. This is used in the NALD import unit test - * @param {String} request.query.filter - a JSON encoded string with property 'licenceNumber' - */ -const getLicence = async (request, h) => { - try { - const filter = JSON.parse(request.query.filter) - const data = await getLicenceJson(filter.licenceNumber) - - if (data) { - return data - } - return Boom.notFound('The requested licence number could not be found') - } catch (err) { - throw Boom.boomify(err, { statusCode: 400 }) - } -} - -/** - * For test purposes, builds returns data - * @param {String} request.query.filter - a JSON encoded string with property 'licenceNumber' - */ -const getReturns = async (request, h) => { - try { - const filter = JSON.parse(request.query.filter) - const data = await buildReturnsPacket(filter.licenceNumber) - - if (data) { - return data - } - return Boom.notFound('The requested licence number could not be found') - } catch (err) { - throw Boom.boomify(err, { statusCode: 400 }) - } -} - -/** - * For test purposes, gets returns formats for given licence number - * @param {String} request.query.filter - JSON encoded filter - */ -const getReturnsFormats = async (request, h) => { - try { - const filter = JSON.parse(request.query.filter) - const data = await getFormats(filter.licenceNumber) - - return data - } catch (err) { - throw Boom.boomify(err, { statusCode: 400 }) - } -} - -/** - * For test purposes, gets returns formats for given licence number - * @param {String} request.query - JSON encoded filter - */ -const getReturnsLogs = async (request, h) => { - try { - const filter = JSON.parse(request.query.filter) - const { formatId, regionCode } = filter - const data = await getLogs(formatId, regionCode) - return data - } catch (err) { - throw Boom.boomify(err, { statusCode: 400 }) - } -} - -/** - * For test purposes, gets returns formats for given licence number - * @param {String} request.query - JSON encoded filter - */ -const getReturnsLogLines = async (request, h) => { - try { - const filter = JSON.parse(request.query.filter) - const { formatId, regionCode, dateFrom } = filter - const data = await getLogLines(formatId, regionCode, dateFrom) - return data - } catch (err) { - throw Boom.boomify(err, { statusCode: 400 }) - } -} - -const postImportLicence = async (request, h) => { - const { licenceNumber } = request.payload - const data = { - licenceNumber, - jobNumber: 1, - numberOfJobs: 1 - } - const message = importLicenceJob.createMessage(data) - - try { - await request.server.messageQueue.publish(message) - return h.response().code(202) - } catch (err) { - throw Boom.boomify(err) - } -} - -/** - * Used to manually trigger the NALD import process - * - * When called it removes any existing 'nald-import.s3-download' job. The config for that job makes it a singleton, - * which means PGBoss will only allow one of that 'job' to be queued. 
The existing schedule and mechanism means one - * is always present in the queue. So, our manual trigger wouldn't work without first removing what's already there. - */ -const postImportLicences = async (request, h) => { - const message = s3DownloadJob.createMessage(false, true) - - try { - await request.server.messageQueue.deleteQueue(s3DownloadJob.name) - await request.server.messageQueue.publish(message) - - return h.response().code(202) - } catch (err) { - throw Boom.boomify(err) - } -} - -module.exports = { - getLicence, - getReturns, - getReturnsFormats, - getReturnsLogs, - getReturnsLogLines, - postImportLicence, - postImportLicences -} diff --git a/src/modules/nald-import/jobs/delete-removed-documents.js b/src/modules/nald-import/jobs/delete-removed-documents.js deleted file mode 100644 index 05817716..00000000 --- a/src/modules/nald-import/jobs/delete-removed-documents.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict' - -const importService = require('../../../lib/services/import') -const QueueLicencesJob = require('./queue-licences') - -const JOB_NAME = 'nald-import.delete-removed-documents' - -function createMessage (replicateReturns) { - return { - name: JOB_NAME, - options: { - expireIn: '1 hours', - singletonKey: JOB_NAME - }, - data: { - replicateReturns - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - return importService.deleteRemovedDocuments() - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - // Publish a new job to populate pending import table but only if delete removed documents was successful - if (!job.failed) { - const { replicateReturns } = job.data.request.data - - await messageQueue.publish(QueueLicencesJob.createMessage(replicateReturns)) - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/nald-import/jobs/import-licence.js b/src/modules/nald-import/jobs/import-licence.js deleted file mode 100644 index d6c15020..00000000 --- a/src/modules/nald-import/jobs/import-licence.js +++ /dev/null @@ -1,74 +0,0 @@ -'use strict' - -const licenceLoader = require('../load') - -const JOB_NAME = 'nald-import.import-licence' - -const options = { - teamSize: 75, - teamConcurrency: 1 -} - -/** - * Data needed by the import licence handler to process the job - * - * This is a convention with PGBoss. A number of the jobs/handlers implement a `createMessage()` function which returns - * a data object that will be used to queue the job. When it then gets processed the data object is passed to the - * handler. - * - * It may also contain non-default config to be used by PGBoss when adding the job, for example, the use of - * `singletonKey` in this job. 
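As a sketch of that convention (the licence number and job counts below are invented for illustration), the message built by this job's createMessage() looks like:
// Illustration only (made-up values):
// createMessage({ licenceNumber: '01/123', jobNumber: 1, numberOfJobs: 3 })
// => {
//      name: 'nald-import.import-licence',
//      data: { licenceNumber: '01/123', jobNumber: 1, numberOfJobs: 3 },
//      options: { singletonKey: '01/123' }
//    }
// i.e. the singletonKey is the licence number, so PGBoss will queue at most one
// import-licence job per licence at a time.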
- * - * @param {Object} data - information needed for the handler to complete the job - * @param {Object.string} data.licenceNumber - reference of the licence to import - * @param {Object.number} data.jobNumber - index position of all licence numbers when this job was added to the queue - * @param {Object.number} data.numberOfJobs - total number of import-licence jobs queued in this session - * - * @returns {Object} the message object used by the handler to process the job - */ -function createMessage (data) { - return { - name: JOB_NAME, - data, - options: { - // Using the licence number as the singleton ensures PGBoss only adds one import licence job for this licence to the - // queue. If anything else tries to add a job with the same licence number PGBoss will ignore it - singletonKey: data.licenceNumber - } - } -} - -async function handler (job) { - try { - // Most 'jobs' are single operation things in the NALD import process, for example, deal with the NALD zip file or - // delete any removed documents. However, there are typically 71K instances of this job queued up as part of the - // process! Previously, we logged every instance hence this was a primary offender in adding noise to the logs. We - // removed that logging but that leaves us with no way of confirming the job is running. So, instead we get - // src/modules/nald-import/jobs/queue-licences.js to include details on how many jobs are queued and when each one - // was added to the queue. We then use this information to log when the first is picked up and the last. - // - // N.B. It's not entirely accurate. If you added logging for all back in you might see the start message appear - // after a few jobs and likewise the finished message a few before the end. But it's good enough to give an - // indication that the 'jobs' did start and finish. 
- if (job.data.jobNumber === 1) { - global.GlobalNotifier.omg(`${JOB_NAME}: started`, { numberOfJobs: job.data.numberOfJobs }) - } - - // Import the licence - await licenceLoader.load(job.data.licenceNumber, job.data.replicateReturns) - - if (job.data.jobNumber === job.data.numberOfJobs) { - global.GlobalNotifier.omg(`${JOB_NAME}: finished`, { numberOfJobs: job.data.numberOfJobs }) - } - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error, job.data) - throw error - } -} - -module.exports = { - createMessage, - handler, - name: JOB_NAME, - options -} diff --git a/src/modules/nald-import/jobs/queue-licences.js b/src/modules/nald-import/jobs/queue-licences.js deleted file mode 100644 index 8fb21e6d..00000000 --- a/src/modules/nald-import/jobs/queue-licences.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict' - -const assertImportTablesExist = require('../lib/assert-import-tables-exist') -const ImportLicenceJob = require('./import-licence') -const importService = require('../../../lib/services/import') - -const JOB_NAME = 'nald-import.queue-licences' - -function createMessage (replicateReturns) { - return { - name: JOB_NAME, - options: { - expireIn: '1 hours', - singletonKey: JOB_NAME - }, - data: { - replicateReturns - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - await assertImportTablesExist.assertImportTablesExist() - const licenceNumbers = await importService.getLicenceNumbers() - - return { licenceNumbers } - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - const { replicateReturns } = job.data.request.data - const { licenceNumbers } = job.data.response - const numberOfJobs = licenceNumbers.length - - for (const [index, licenceNumber] of licenceNumbers.entries()) { - // This information is to help us log when the import licence jobs start and finish. 
See - // src/modules/nald-import/jobs/import-licence.js for more details - const data = { - licenceNumber, - jobNumber: index + 1, - numberOfJobs, - replicateReturns - } - await messageQueue.publish(ImportLicenceJob.createMessage(data)) - } - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/nald-import/jobs/s3-download.js b/src/modules/nald-import/jobs/s3-download.js deleted file mode 100644 index 06c46b33..00000000 --- a/src/modules/nald-import/jobs/s3-download.js +++ /dev/null @@ -1,104 +0,0 @@ -'use strict' - -const applicationStateService = require('../../../lib/services/application-state-service.js') -const DeleteRemovedDocumentsJob = require('./delete-removed-documents.js') -const extractService = require('../services/extract-service.js') -const ImportLicenceJob = require('./import-licence.js') -const QueueLicences = require('./queue-licences') -const s3Service = require('../services/s3-service.js') - -const JOB_NAME = 'nald-import.s3-download' - -function createMessage (checkEtag = true, replicateReturns = false) { - return { - name: JOB_NAME, - options: { - expireIn: '1 hours', - singletonKey: JOB_NAME - }, - data: { - checkEtag, - replicateReturns - } - } -} - -async function handler (job) { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - const status = await _naldFileStatus(job.data.checkEtag) - - if (status.isRequired) { - await applicationStateService.save('nald-import', { etag: status.etag, isDownloaded: false }) - await extractService.downloadAndExtract() - await applicationStateService.save('nald-import', { etag: status.etag, isDownloaded: true }) - } - - return status - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - const { isRequired } = job.data.response - const { replicateReturns } = job.data.request.data - - if (isRequired) { - // Delete existing PG boss import queues - await Promise.all([ - messageQueue.deleteQueue(ImportLicenceJob.name), - messageQueue.deleteQueue(DeleteRemovedDocumentsJob.name), - messageQueue.deleteQueue(QueueLicences.name) - ]) - - // Publish a new job to delete any removed documents - await messageQueue.publish(DeleteRemovedDocumentsJob.createMessage(replicateReturns)) - } - } - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`, job.data.response) -} - -function _isRequired (etag, state, checkEtag) { - if (!state.isDownloaded) { - return true - } - - if (!checkEtag) { - return true - } - - return etag !== state.etag -} - -/** - * Gets status of file in S3 bucket and current application state - * @return {Promise} - */ -async function _naldFileStatus (checkEtag) { - const etag = await s3Service.getEtag() - let state - - try { - state = await applicationStateService.get('nald-import') - } catch (err) { - state = {} - } - - return { - etag, - state, - isRequired: _isRequired(etag, state, checkEtag) - } -} - -module.exports = { - createMessage, - handler, - onComplete, - name: JOB_NAME -} diff --git a/src/modules/nald-import/lib/assert-import-tables-exist.js b/src/modules/nald-import/lib/assert-import-tables-exist.js deleted file mode 100644 index 4ea6d49c..00000000 --- a/src/modules/nald-import/lib/assert-import-tables-exist.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict' - -const { NALDImportTablesError } = require('./errors') -const coreQueries = require('./nald-queries/core') - -const 
assertImportTablesExist = async () => { - const exists = await coreQueries.importTableExists() - if (!exists) { - throw new NALDImportTablesError() - } -} - -module.exports = { - assertImportTablesExist -} diff --git a/src/modules/nald-import/lib/constants.js b/src/modules/nald-import/lib/constants.js deleted file mode 100644 index e16f93a4..00000000 --- a/src/modules/nald-import/lib/constants.js +++ /dev/null @@ -1,11 +0,0 @@ -const config = require('../../../../config') - -module.exports = { - S3_IMPORT_PATH: config.import.nald.path, - S3_IMPORT_FILE: 'nald_enc.zip', - LOCAL_TEMP_PATH: './temp/', - CSV_DIRECTORY: 'NALD', - SCHEMA_IMPORT: 'import', - SCHEMA_TEMP: 'import_temp', - APPLICATION_STATE_KEY: 'nald-import' -} diff --git a/src/modules/nald-import/lib/due-date.js b/src/modules/nald-import/lib/due-date.js deleted file mode 100644 index 0ab9835c..00000000 --- a/src/modules/nald-import/lib/due-date.js +++ /dev/null @@ -1,71 +0,0 @@ -const moment = require('moment') -const returnsQueries = require('./nald-queries/returns') -const { returns: { date: { getPeriodEnd } } } = require('@envage/water-abstraction-helpers') -const { mapProductionMonth } = require('./transform-returns-helpers') - -/** - * Gets the return version end date of the supplied format - * @param {Object} format - * @return {String|Null} date in format YYYY-MM-DD or null - */ -const getReturnVersionEndDate = format => { - if (format.EFF_END_DATE === 'null') { - return null - } - return moment(format.EFF_END_DATE, 'DD/MM/YYYY').format('YYYY-MM-DD') -} - -/** - * Checks whether the results returned from the mod logs query contains - * one of the relevant reason codes - * @param {Array} modLogs - results from the getReturnVersionReason query - * @return {Boolean} true if a code is matched - */ -const isVariationCode = modLogs => { - const eventCodes = ['VARF', 'VARM', 'AMND', 'NAME', 'REDS', 'SPAC', 'SPAN', 'XCORR'] - const codes = modLogs.map(row => row.AMRE_CODE) - const intersection = (arr, ...args) => - arr.filter(item => args.every(arr => arr.includes(item))) - return intersection(codes, eventCodes)?.length > 0 -} - -/** - * Gets the due date for the supplied cycle end date and format - * @param {String} endDate - cycle end date YYYY-MM-DD - * @param {Object} format - the format object - * @return {Promise} resolved with due date YYYY-MM-DD - */ -const getDueDate = async (endDate, format) => { - let refDate = endDate - - const returnVersionEndDate = getReturnVersionEndDate(format) - - // If the end date of the calculated return cycle matches the end date of - // the return version, we may have a split - if (endDate === returnVersionEndDate) { - // Find the mod log reason codes for the following return version - const nextReturnVersion = parseInt(format.VERS_NO) + 1 - const results = await returnsQueries.getReturnVersionReason( - format.AABL_ID, format.FGAC_REGION_CODE, nextReturnVersion - ) - - // If the code matches, use the end date of the full return cycle to - // calculate the due date - if (isVariationCode(results)) { - const { isSummer } = mapProductionMonth(format.FORM_PRODN_MONTH) - refDate = getPeriodEnd(endDate, isSummer) - } - } - - // Due to Coronavirus in 2020, the winter/all year period ending 2020-03-31 - // had the deadline extended to 16 October - if (refDate === '2020-03-31') { - return '2020-10-16' - } - - return moment(refDate, 'YYYY-MM-DD').add(28, 'days').format('YYYY-MM-DD') -} - -module.exports = { - getDueDate -} diff --git a/src/modules/nald-import/lib/errors.js 
b/src/modules/nald-import/lib/errors.js deleted file mode 100644 index 018d8db0..00000000 --- a/src/modules/nald-import/lib/errors.js +++ /dev/null @@ -1,28 +0,0 @@ -const { DBError } = require('../../../lib/errors') - -class LicenceNotFoundError extends Error { - constructor (message) { - super(message) - this.name = 'LicenceNotFoundError' - } -} - -class MetaDataError extends Error { - constructor (message) { - super(message) - this.name = 'MetaDataError' - } -} - -class NALDImportTablesError extends DBError { - constructor (message) { - super(message) - this.name = 'NALDImportTablesError' - } -} - -module.exports = { - LicenceNotFoundError, - MetaDataError, - NALDImportTablesError -} diff --git a/src/modules/nald-import/lib/persist-returns.js b/src/modules/nald-import/lib/persist-returns.js deleted file mode 100644 index b257eafe..00000000 --- a/src/modules/nald-import/lib/persist-returns.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict' - -/** - * Creates or updates return cycle via returns API based on the return end date - */ -const moment = require('moment') - -const { replicateReturnsDataFromNaldForNonProductionEnvironments } = require('./returns-helper') -const returnsApi = require('../../../lib/connectors/returns') -const config = require('../../../../config') -const { returns } = returnsApi - -/** - * Checks whether return exists - * @param {String} returnId - the return ID in the returns service - * @return {Promise} resolves with boolean - */ -const returnExists = async (returnId) => { - const { data } = await returns.findOne(returnId) - if (data) { - return true - } - return false -} - -/** - * Gets update data from row - * @param {Object} row - * @return {Object} row - with only fields that we wish to update set - */ -const getUpdateRow = (row) => { - const { end_date: endDate } = row - - if (moment(endDate).isBefore('2018-10-31')) { - const { status, metadata, received_date: receivedDate, due_date: dueDate } = row - return { status, metadata, received_date: receivedDate, due_date: dueDate } - } else { - const { metadata, due_date: dueDate } = row - return { metadata, due_date: dueDate } - } -} - -/** - * Creates or updates return depending on whether start_date - * @param {Object} row - * @return {Promise} resolves when row is created/updated - */ -const createOrUpdateReturn = async (row, replicateReturns) => { - const { return_id: returnId } = row - - const exists = await returnExists(returnId) - - // Conditional update - if (exists) { - return returns.updateOne(returnId, getUpdateRow(row)) - } else { - // Insert - const thisReturn = await returns.create(row) - - /* For non-production environments, we allow the system to import the returns data so we can test billing */ - if (!config.isProduction && replicateReturns) { - await replicateReturnsDataFromNaldForNonProductionEnvironments(row) - } - return thisReturn - } -} - -/** - * Persists list of returns to API - * @param {Array} returns - * @param {Boolean} replicateReturns - * @return {Promise} resolves when all processed - */ -const persistReturns = async (returns, replicateReturns) => { - for (const ret of returns) { - if (!config.isProduction && replicateReturns) { - await returnsApi.deleteAllReturnsData(ret.return_id) - } - await createOrUpdateReturn(ret, replicateReturns) - } -} - -module.exports = { - createOrUpdateReturn, - getUpdateRow, - returnExists, - persistReturns -} diff --git a/src/modules/nald-import/lib/repository.js b/src/modules/nald-import/lib/repository.js deleted file mode 100644 index 
15748787..00000000 --- a/src/modules/nald-import/lib/repository.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' - -class Repository { - /** - * Creates a repository instance for persisting NALD data to the database - * @param {Object} pool - Postgres pool instance - * @param {Object} config - configuration - * @param {String} config.table - the Postgres table name - * @param {Object} config.upsert - upsert configuration - * @param {Array} config.upsert.fields - the field(s) to detect conflict on - * @param {Array} config.upsert.set - the field(s) to update on conflict - */ - constructor (pool, config) { - this.pool = pool - this.config = config - } - - /** - * Persist one or more rows of data to the DB - * @param {Array|Object} data - a row, or multiple rows of data as a key/value hash - * @param {Array} [columns] - which columns to return - * @return {Promise} resolves with return value from Postgres pool.query - */ - async persistBatch (data, columns = null) { - const { table, upsert } = this.config - - // Convert all data to array - const insertData = Array.isArray(data) ? data : [data] - - const fields = Object.keys(insertData[0]) - - let query = `INSERT INTO ${table} (${fields.join(',')}) VALUES ` - - let queryParams = [] - const rows = insertData.map(row => { - const values = fields.map((value, i) => `$${i + 1 + queryParams.length}`) - // Add values to query params - queryParams = [...queryParams, ...Object.values(row)] - return '(' + values.join(',') + ')' - }) - - query += rows.join(',') - - if (upsert) { - const parts = upsert.set.map(field => `${field}=EXCLUDED.${field}`) - query += ` ON CONFLICT (${upsert.fields.join(',')}) DO UPDATE SET ${parts.join(',')}` - } - - if (columns) { - query += ` RETURNING ${columns.join('","')}` - } - - return this.pool.query(query, queryParams) - } - - /** - * Persists data for the named entity, if the number of query params - * will exceed the Postgres limit of 65535 they are split up into batches - * @param {Array|Object} data - data to persist - * @param {Array} - columns to return - */ - async persist (data, columns = null) { - if (Array.isArray(data) && data.length === 0) { - return - } - - const insertData = Array.isArray(data) ? data : [data] - const maxRows = Math.floor(65535 / Object.keys(insertData[0]).length) - - const chunks = this._chunk(insertData, maxRows) - const result = { rows: [], rowCount: 0 } - - for (const batch of chunks) { - const { rows, rowCount } = await this.persistBatch(batch, columns) - result.rows.push(...rows) - result.rowCount += rowCount - } - - return result - } - - _chunk (arr, chunkSize = 1, cache = []) { - const tmp = [...arr] - if (chunkSize <= 0) return cache - while (tmp.length) cache.push(tmp.splice(0, chunkSize)) - return cache - } -} - -module.exports = Repository diff --git a/src/modules/nald-import/lib/returns-helper.js b/src/modules/nald-import/lib/returns-helper.js deleted file mode 100644 index 38d1889f..00000000 --- a/src/modules/nald-import/lib/returns-helper.js +++ /dev/null @@ -1,126 +0,0 @@ -'use strict' - -const moment = require('moment') -const { v4: uuid } = require('uuid') -const db = require('./db') -const returnsApi = require('../../../lib/connectors/returns') -const { versions, lines } = returnsApi - -const dbdateformat = 'YYYY-MM-DD' - -/* UTILS */ -const plainEnglishFrequency = (val = 'M') => ({ - D: 'day', - W: 'week', - M: 'month', - Y: 'year' -}[val]) - -const padDateComponent = (val = '1') => val.length === 1 ? 
`0${val}` : val - -const createLine = (versionId, startDate, endDate, frequency, line, qtyKey) => parseFloat(line[qtyKey]) > 0 && - lines.create({ - line_id: uuid(), - version_id: versionId, - substance: 'water', - quantity: parseFloat(line[qtyKey]), - unit: 'm³', - user_unit: 'm³', - start_date: startDate, - end_date: endDate, - time_period: plainEnglishFrequency(frequency), - metadata: JSON.stringify(line), - reading_type: 'measured' - }) - -/* MAIN FUNC */ -const replicateReturnsDataFromNaldForNonProductionEnvironments = async thisReturn => { - const { metadata } = thisReturn - // create returns.versions - const version = await versions.create({ - metadata, - version_id: uuid(), - return_id: thisReturn.return_id, - user_id: 'imported@from.nald', - user_type: 'system', - version_number: JSON.parse(metadata).version, - nil_return: false, - current: JSON.parse(metadata).isCurrent - }) - - const naldReturnFormatQuery = await db.dbQuery('SELECT * FROM import."NALD_RET_FORMATS" WHERE "ID" = $1', [thisReturn.return_requirement]) - const naldReturnFormat = naldReturnFormatQuery[0] - - const naldLinesFromNaldReturnFormLogs = await db.dbQuery(` - SELECT * FROM import."NALD_RET_FORM_LOGS" - WHERE "ARTY_ID" = $1 - AND "FGAC_REGION_CODE" = $2 - AND "DATE_FROM" = $3 - ORDER BY to_date("DATE_FROM", 'DD/MM/YYYY')`, - [ - thisReturn.return_requirement, - naldReturnFormat.FGAC_REGION_CODE, - `${padDateComponent(naldReturnFormat.ABS_PERIOD_ST_DAY)}/${padDateComponent(naldReturnFormat.ABS_PERIOD_ST_MONTH)}/${moment(thisReturn.start_date).format('YYYY')}` - ]) - - const returnLinesFromNaldReturnLines = await db.dbQuery(` - SELECT * FROM import."NALD_RET_LINES" WHERE - "ARFL_ARTY_ID" = $1 - AND "FGAC_REGION_CODE" = $2 - AND to_date("RET_DATE", 'YYYYMMDDHH24MISS')>=to_date($3, $5) - AND to_date("RET_DATE", 'YYYYMMDDHH24MISS')<=to_date($4, $5) - ORDER BY "RET_DATE"; - `, - [ - thisReturn.return_requirement, - naldReturnFormat.FGAC_REGION_CODE, - `${moment(thisReturn.start_date).format('YYYY')}-${padDateComponent(naldReturnFormat.ABS_PERIOD_ST_MONTH)}-${padDateComponent(naldReturnFormat.ABS_PERIOD_ST_DAY)}`, - `${moment(thisReturn.start_date).add(naldReturnFormat.ABS_PERIOD_END_MONTH < naldReturnFormat.ABS_PERIOD_ST_MONTH ? 
1 : 0, 'year').format('YYYY')}-${padDateComponent(naldReturnFormat.ABS_PERIOD_END_MONTH)}-${padDateComponent(naldReturnFormat.ABS_PERIOD_END_DAY)}`, - dbdateformat - ]) - - // Two db queries have been run - // Now we check if they have pulled any data from the db - // If they have, set qtyKey to be the name of the column we will be summing up - // Set iterable to the data set we will be summing - let qtyKey - let iterable - - if (returnLinesFromNaldReturnLines.length > 0) { - qtyKey = 'RET_QTY' - iterable = returnLinesFromNaldReturnLines - } else if (naldLinesFromNaldReturnFormLogs.length > 0) { - qtyKey = 'MONTHLY_RET_QTY' - iterable = naldLinesFromNaldReturnFormLogs - } else { - iterable = [] - } - - const sumOfLines = iterable - .map((item) => parseFloat(item[qtyKey])) - .filter((value) => ![null, undefined, NaN].includes(value)) - .reduce((acc, num) => acc + num, 0) - - if (sumOfLines === 0) { - await versions.updateOne(version.data.version_id, { nil_return: true }, ['nil_return']) - } else { - iterable.forEach(line => { - let startDate - let endDate - - if (returnLinesFromNaldReturnLines.length > 0) { - startDate = moment(line.RET_DATE, 'YYYYMMDD000000').format(dbdateformat) - endDate = moment(line.RET_DATE, 'YYYYMMDD000000').format(dbdateformat) - } else { - startDate = moment(line.FORM_PROD_ST_DATE, 'DD/MM/YYYY').format(dbdateformat) - endDate = moment(line.FORM_PROD_ST_DATE, 'DD/MM/YYYY').format(dbdateformat) - } - - createLine(version.data.version_id, startDate, endDate, naldReturnFormat.ARTC_REC_FREQ_CODE, line, qtyKey) - }) - } -} - -module.exports = { - replicateReturnsDataFromNaldForNonProductionEnvironments -} diff --git a/src/modules/nald-import/lib/transform-returns-helpers.js b/src/modules/nald-import/lib/transform-returns-helpers.js deleted file mode 100644 index 9b4343bf..00000000 --- a/src/modules/nald-import/lib/transform-returns-helpers.js +++ /dev/null @@ -1,316 +0,0 @@ -'use strict' - -const moment = require('moment') - -const waterHelpers = require('@envage/water-abstraction-helpers') - -const { returns: { date: { getPeriodStart } } } = waterHelpers -const naldFormatting = waterHelpers.nald.formatting - -const mapPeriod = (str) => { - const periods = { - D: 'day', - W: 'week', - M: 'month', - Q: 'quarter', - A: 'year' - } - return periods[str] -} - -/** - * Gets additional NALD data to store in return metadata - * @param {Object} format - the return format record from NALD data - * @return {Object} metadata to store - */ -const formatReturnNaldMetadata = (format) => { - return { - regionCode: parseInt(format.FGAC_REGION_CODE), - areaCode: format.AREP_AREA_CODE, - formatId: parseInt(format.ID), - periodStartDay: format.ABS_PERIOD_ST_DAY, - periodStartMonth: format.ABS_PERIOD_ST_MONTH, - periodEndDay: format.ABS_PERIOD_END_DAY, - periodEndMonth: format.ABS_PERIOD_END_MONTH - } -} - -/** - * Returns the trimmed purpose alias if it is not - * already empty or equal to 'null' - */ -const getPurposeAlias = purpose => { - const alias = (purpose.PURP_ALIAS || '').trim() - - if (!['', 'null'].includes(alias.toLowerCase())) { - return alias - } -} - -/** - * Gets metadata object to store in returns table - * @param {Object} format - * @return {Object} return metadata - */ -const formatReturnMetadata = (format) => { - const { isSummer, isUpload, isLineEntry } = mapProductionMonth(format.FORM_PRODN_MONTH) - - return { - version: 1, - description: format.SITE_DESCR, - purposes: format.purposes.map(purpose => ({ - primary: { - code: purpose.APUR_APPR_CODE, - description: 
purpose.primary_purpose - }, - secondary: { - code: purpose.APUR_APSE_CODE, - description: purpose.secondary_purpose - }, - tertiary: { - code: purpose.APUR_APUS_CODE, - description: purpose.tertiary_purpose - }, - alias: getPurposeAlias(purpose) - })), - points: format.points.map(point => { - return naldFormatting.formatAbstractionPoint(waterHelpers.nald.transformNull(point)) - }), - nald: formatReturnNaldMetadata(format), - isTwoPartTariff: format.TPT_FLAG === 'Y', - isSummer, - isUpload: isUpload || isLineEntry - } -} - -/** - * Maps NALD production month - * @param {Number} month - * @return {Object} - */ -const mapProductionMonth = (month) => { - const intMonth = parseInt(month) - return { - isSummer: [65, 45, 80].includes(intMonth), - isUpload: [65, 66].includes(intMonth), - isLineEntry: [45, 46].includes(intMonth), - formProduced: [80, 70].includes(intMonth) - } -} - -/** - * A return may comprise more than one form log - * If any form log has not been received, we return null - * If there are no form log, return null - * otherwise return max received last date - * @param {Array} logs - form log records - */ -const mapReceivedDate = (logs) => { - const dates = logs.map(row => row.RECD_DATE) - - if (logs.length < 1) { - return null - } - - if (dates.findIndex(val => val === 'null') !== -1) { - return null - } - - const timestamps = dates.map(date => moment(date, 'DD/MM/YYYY HH:mm:ss').format('YYYY-MM-DD')) - - // Sorting the timeStamps into highest value first and returning that individual value - const max = (timeStamp) => { - const sorted = timeStamp.sort((startTime1, startTime2) => { - if ((startTime1.unix > startTime2.unix)) { - return -1 - } else { - return 1 - } - }) - return sorted[0] - } - return max(timestamps) -} - -/** - * Split dates are the start date of the period. 
This function - * adds the period end dates to the array - * @param {Array} arr - the array of split dates - * @return {Array} an array containing the split dates and previous day - */ -const sortAndPairSplitDates = (arr) => { - const sorted = arr.map(val => moment(val).format('YYYY-MM-DD')).sort() - - return sorted.reduce((acc, value) => { - acc.push(moment(value).subtract(1, 'day').format('YYYY-MM-DD')) - acc.push(value) - return acc - }, []) -} - -/** - * Adds a date to the supplied array, if it is between but not equal to the - * supplied start and end dates - * @param {Array} dates - * @param {String} date - YYYY-MM-DD, the date to add to array - * @param {String} startDate - the start of the date range YYYY-MM-DD - * @param {String} endDate - the end of the date range YYYY-MM-DD - * @return {Array} new date array - */ -const addDate = (dates, date, startDate, endDate) => { - const m = moment(date) - const dateStr = m.format('YYYY-MM-DD') - const arr = dates.slice() - const isValid = m.isBetween(startDate, endDate, 'day', '()') - const isUnique = !arr.includes(dateStr) - if (isValid && isUnique) { - arr.push(dateStr) - } - return arr -} - -const chunk = (arr, chunkSize = 1, cache = []) => { - const tmp = [...arr] - if (chunkSize <= 0) return cache - while (tmp.length) cache.push(tmp.splice(0, chunkSize)) - return cache -} - -/** - * Gets summer/financial year return cycles, including splitting the cycles - * by licence effective start date (split date) - * @param {String} startDate - YYYY-MM-DD - * @param {String} endDate - YYYY-MM-DD - * @param {String} splitDate - licence effective start date - * @param {Boolean} isSummer - whether summer return cycle (default financial year) - * @return {Array} of return cycle objects - */ -const getReturnCycles = (startDate, endDate, splitDate, isSummer = false) => { - let splits = [] - - // Add split date - if (splitDate) { - splits = addDate(splits, splitDate, startDate, endDate) - } - - // Date pointer should be within summer/financial year - let datePtr = moment().year() - do { - datePtr = getPeriodStart(datePtr, isSummer) - splits = addDate(splits, datePtr, startDate, endDate) - datePtr = moment(datePtr).add(1, 'year') - } - while (moment(datePtr).isBefore(endDate)) - - const dates = chunk([startDate, ...sortAndPairSplitDates(splits), endDate], 2) - - return dates.map(arr => ({ - startDate: arr[0], - endDate: arr[1], - isCurrent: (splitDate === null) || moment(arr[0]).isSameOrAfter(splitDate) - })) -} - -/** - * Given format/return version data, gets the start and end dates of - * this format - * @param {Object} format - * @return {String} date YYYY-MM-DD - */ -const getFormatStartDate = (format) => { - const versionStartDate = moment(format.EFF_ST_DATE, 'DD/MM/YYYY') - const timeLimitedStartDate = moment(format.TIMELTD_ST_DATE, 'DD/MM/YYYY') - - if (timeLimitedStartDate.isValid() && timeLimitedStartDate.isAfter(versionStartDate)) { - return timeLimitedStartDate.format('YYYY-MM-DD') - } - return versionStartDate.format('YYYY-MM-DD') -} - -/** - * Finds the earliest valid date that represents the end date of - * the given format. - * - * Returns null, if no format end date.
- * @param {Object} format - * @return {String} date YYYY-MM-DD or null - */ -const getFormatEndDate = (format) => { - const { - EFF_END_DATE, - TIMELTD_END_DATE, - LICENCE_LAPSED_DATE, - LICENCE_REVOKED_DATE, - LICENCE_EXPIRY_DATE - } = format - - const dates = Object.values({ - EFF_END_DATE, - TIMELTD_END_DATE, - LICENCE_LAPSED_DATE, - LICENCE_REVOKED_DATE, - LICENCE_EXPIRY_DATE - }) - - const validDates = dates - .map(date => moment(date, 'DD/MM/YYYY')) - .filter(date => date.isValid()) - .sort(chronologicalMomentSort) - .map(date => date.format('YYYY-MM-DD')) - - return validDates.length ? validDates[0] : null -} - -/** - * A sort comparator that will sort moment dates in ascending order - */ -const chronologicalMomentSort = (left, right) => left.diff(right) - -/** - * Gets cycles for a given format. If the format has no effective end date, - * then one is created at the end of the following year. These will be filtered - * out later by checking if form logs exist for the cycles calculated - * @param {Object} format - row of data from NALD_RET_FORMATS - * @return {Array} array of return cycles with startDate and endDate in each item - */ -const getFormatCycles = (format, splitDate) => { - const { - FORM_PRODN_MONTH: productionMonth - } = format - - // Get summer cycle flag - const { isSummer } = mapProductionMonth(productionMonth) - - // Get start/end date for format, taking into account version dates and - // time-limited dates - const startDate = getFormatStartDate(format) - let endDate = getFormatEndDate(format) - - // If no end date, set date in future - if (!endDate) { - endDate = moment(getPeriodStart(moment().add(1, 'years'), isSummer)).subtract(1, 'day').format('YYYY-MM-DD') - } - - return getReturnCycles(startDate, endDate, splitDate, isSummer) -} - -/** - * Gets return status - * @param {String|null} receivedDate - the date received from NALD form logs YYYY-MM-DD, or null - * @param {String} startDate - the start date of the return period - * @return {String} status - completed|due - */ -const getStatus = receivedDate => receivedDate === null ?
'due' : 'completed' - -module.exports = { - mapPeriod, - mapProductionMonth, - formatReturnMetadata, - getFormatCycles, - mapReceivedDate, - getReturnCycles, - addDate, - getStatus, - getFormatStartDate, - getFormatEndDate -} diff --git a/src/modules/nald-import/load.js b/src/modules/nald-import/load.js deleted file mode 100644 index d48b4f2b..00000000 --- a/src/modules/nald-import/load.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict' - -/** - * Code for loading imported data to the target database(s) - */ -const { v4: uuidV4 } = require('uuid') -const { buildCRMPacket } = require('./transform-crm') -const { buildReturnsPacket } = require('./transform-returns') -const { getLicenceJson, buildPermitRepoPacket } = require('./transform-permit') -const returnsConnector = require('../../lib/connectors/returns') -const { persistReturns } = require('./lib/persist-returns') - -const repository = require('./repositories') - -/** - * Loads data into the permit repository and CRM doc header - * @param {String} licenceNumber - * @param {Object} licenceData - extracted from NALD import tables - * @return {Promise} resolves when completed - */ -const loadPermitAndDocumentHeader = async (licenceNumber, licenceData) => { - const permit = buildPermitRepoPacket(licenceNumber, 1, 8, licenceData) - const { rows: [{ licence_id: permitRepoId }] } = await repository.licence.persist(permit, ['licence_id']) - - // Create CRM data and persist - const crmData = buildCRMPacket(licenceData, licenceNumber, permitRepoId) - await repository.document.persist({ document_id: uuidV4(), ...crmData }) -} - -/** - * Calculates and imports a list of return cycles for the given licence - * based on NALD formats and form logs - * @param {String} licenceNumber - * @param {Boolean} replicateReturns - * @return {Promise} resolves when returns imported - */ -const loadReturns = async (licenceNumber, replicateReturns) => { - const { returns } = await buildReturnsPacket(licenceNumber) - await persistReturns(returns, replicateReturns) - - // Clean up invalid cycles - const returnIds = returns.map(row => row.return_id) - await returnsConnector.voidReturns(licenceNumber, returnIds) -} - -/** - * Imports the whole licence - * @param {String} licenceNumber - * @param {Boolean} replicateReturns - * @return {Promise} resolves when complete - */ -const load = async (licenceNumber, replicateReturns) => { - const licenceData = await getLicenceJson(licenceNumber) - - if (licenceData.data.versions.length > 0) { - await loadPermitAndDocumentHeader(licenceNumber, licenceData) - await loadReturns(licenceNumber, replicateReturns) - } -} - -module.exports = { - load -} diff --git a/src/modules/nald-import/plugin.js b/src/modules/nald-import/plugin.js deleted file mode 100644 index 4cd86be4..00000000 --- a/src/modules/nald-import/plugin.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict' - -const cron = require('node-cron') - -const DeleteRemovedDocumentsJob = require('./jobs/delete-removed-documents.js') -const ImportLicenceJob = require('./jobs/import-licence.js') -const QueueLicencesJob = require('./jobs/queue-licences.js') -const S3DownloadJob = require('./jobs/s3-download.js') - -const config = require('../../../config') - -async function register (server) { - // These are the steps in the import process. 
This is creating the 'queues' and where relevant, setting the - // a handler to be called when a job in that queue completes - - // First step is to download the nald_enc.zip from S3, extract it, and push the data into 'import' - await server.messageQueue.subscribe(S3DownloadJob.name, S3DownloadJob.handler) - await server.messageQueue.onComplete(S3DownloadJob.name, (executedJob) => { - return S3DownloadJob.onComplete(server.messageQueue, executedJob) - }) - - // Next step is to delete documents that have been removed from NALD - await server.messageQueue.subscribe(DeleteRemovedDocumentsJob.name, DeleteRemovedDocumentsJob.handler) - await server.messageQueue.onComplete(DeleteRemovedDocumentsJob.name, (executedJob) => { - return DeleteRemovedDocumentsJob.onComplete(server.messageQueue, executedJob) - }) - - // Then we get the licences to import and publish a job for each one - await server.messageQueue.subscribe(QueueLicencesJob.name, QueueLicencesJob.handler) - await server.messageQueue.onComplete(QueueLicencesJob.name, (executedJob) => { - return QueueLicencesJob.onComplete(server.messageQueue, executedJob) - }) - - // Then we import each licence - await server.messageQueue.subscribe(ImportLicenceJob.name, ImportLicenceJob.options, ImportLicenceJob.handler) - - cron.schedule(config.import.nald.schedule, async () => { - await server.messageQueue.publish(S3DownloadJob.createMessage()) - }) -} - -module.exports = { - plugin: { - name: 'importNaldData', - dependencies: ['pgBoss'], - register - } -} diff --git a/src/modules/nald-import/repositories.js b/src/modules/nald-import/repositories.js deleted file mode 100644 index b982bc78..00000000 --- a/src/modules/nald-import/repositories.js +++ /dev/null @@ -1,55 +0,0 @@ -const Repository = require('./lib/repository') -const { pool } = require('../../lib/connectors/db') - -const repoConfig = { - - licence: { - table: 'permit.licence', - upsert: { - fields: ['licence_regime_id', 'licence_type_id', 'licence_ref'], - set: ['licence_status_id', 'licence_search_key', 'is_public_domain', 'licence_start_dt', 'licence_end_dt', 'licence_data_value'] - } - }, - - document: { - table: 'crm.document_header', - upsert: { - fields: ['system_id', 'system_internal_id', 'regime_entity_id'], - set: ['system_external_id', 'metadata'] - } - }, - - return: { - table: 'returns.returns', - primaryKey: 'return_id', - upsert: { - fields: ['return_id'], - set: ['regime', 'licence_type', 'licence_ref', 'start_date', 'end_date', 'returns_frequency', 'status', 'source', 'metadata', 'received_date'] - } - }, - - version: { - table: 'returns.versions', - upsert: { - fields: ['version_id'], - set: ['return_id', 'user_id', 'user_type', 'version_number', 'metadata', 'nil_return'] - } - }, - - line: { - table: 'returns.lines', - upsert: { - fields: ['version_id', 'end_date'], - set: ['version_id', 'substance', 'quantity', 'unit', 'start_date', 'end_date', 'time_period', 'metadata', 'reading_type'] - } - } - -} - -module.exports = { - licence: new Repository(pool, repoConfig.licence), - document: new Repository(pool, repoConfig.document), - return: new Repository(pool, repoConfig.return), - version: new Repository(pool, repoConfig.version), - line: new Repository(pool, repoConfig.line) -} diff --git a/src/modules/nald-import/routes.js b/src/modules/nald-import/routes.js deleted file mode 100644 index 22a38912..00000000 --- a/src/modules/nald-import/routes.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const Joi = require('@hapi/joi') - -const controller = require('./controller') 
- -module.exports = [ - { - method: 'GET', - path: '/import/1.0/nald/licence', - handler: controller.getLicence, - config: { description: 'Get permit repo packet by licence number' } - }, - { - method: 'GET', - path: '/import/1.0/nald/returns', - handler: controller.getReturns, - config: { description: 'Get a returns data packet by licence number' } - }, - { - method: 'GET', - path: '/import/1.0/nald/returns/formats', - handler: controller.getReturnsFormats, - config: { description: 'Gets a returns formats for given licence number' } - }, - { - method: 'GET', - path: '/import/1.0/nald/returns/logs', - handler: controller.getReturnsLogs, - config: { description: 'Gets a returns logs for given format' } - }, - { - method: 'GET', - path: '/import/1.0/nald/returns/lines', - handler: controller.getReturnsLogLines, - config: { description: 'Gets a returns lines for a given log' } - }, - { - method: 'POST', - path: '/import/1.0/nald/licence', - handler: controller.postImportLicence, - options: { - validate: { - payload: { - licenceNumber: Joi.string().required() - } - } - } - }, - { - method: 'POST', - path: '/import/1.0/nald/licences', - handler: controller.postImportLicences, - config: { description: 'Trigger the NALD licences import process' } - } -] diff --git a/src/modules/nald-import/services/extract-service.js b/src/modules/nald-import/services/extract-service.js deleted file mode 100644 index 73dda9c4..00000000 --- a/src/modules/nald-import/services/extract-service.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict' - -const path = require('path') -const processHelper = require('@envage/water-abstraction-helpers').process - -const constants = require('../lib/constants') - -const s3Service = require('./s3-service') -const zipService = require('./zip-service') -const schemaService = require('./schema-service') -const loadCsvService = require('./load-csv-service') - -// Download / unzip paths -const FINAL_PATH = path.join(constants.LOCAL_TEMP_PATH, constants.CSV_DIRECTORY) - -/** - * Prepares for import by removing files from temporary folder and creating directory - */ -const prepare = async () => { - await processHelper.execCommand(`rm -rf ${constants.LOCAL_TEMP_PATH}`) - await processHelper.execCommand(`mkdir -p ${constants.LOCAL_TEMP_PATH}`) - await processHelper.execCommand(`mkdir -p ${FINAL_PATH}`) -} - -const steps = [ - { - message: 'preparing folders', - action: prepare - }, - { - message: 'downloading from s3', - action: () => s3Service.download() - }, - { - message: 'extracting files from zip', - action: () => zipService.extract() - }, - { - message: 'create import_temp schema', - action: () => schemaService.dropAndCreateSchema(constants.SCHEMA_TEMP) - }, - { - message: 'importing CSV files', - action: () => loadCsvService.importFiles(constants.SCHEMA_TEMP) - }, - { - message: 'swapping schema from import_temp to import', - action: () => schemaService.swapTemporarySchema() - }, - { - message: 'cleaning up local files', - action: prepare - } -] - -/** - * The download/extract tasks have been combined into a single task - * since they are currently running on the local file system, so must all - * run on the same instance - * @return {Promise} - */ -const downloadAndExtract = async () => { - for (const step of steps) { - await step.action() - } -} - -/** - * Move test files - * For the purposes of unit testing, this copies dummy CSV files from a test - * folder to the import folder ready for the import script - * @return {Promise} resolves when command completes - */ -const 
copyTestFiles = async () => { - await prepare() - await schemaService.dropAndCreateSchema(constants.SCHEMA_IMPORT) - - // move dummy data files - await processHelper.execCommand(`cp ./test/dummy-csv/* ${FINAL_PATH}`) - - // Import CSV - return loadCsvService.importFiles(constants.SCHEMA_IMPORT) -} - -module.exports = { - copyTestFiles, - downloadAndExtract -} diff --git a/src/modules/nald-import/services/s3-service.js b/src/modules/nald-import/services/s3-service.js deleted file mode 100644 index 5fb382f9..00000000 --- a/src/modules/nald-import/services/s3-service.js +++ /dev/null @@ -1,30 +0,0 @@ -const path = require('path') -const s3 = require('../../../lib/services/s3') -const constants = require('../lib/constants') - -const s3Path = path.join(constants.S3_IMPORT_PATH, constants.S3_IMPORT_FILE) -const localPath = path.join(constants.LOCAL_TEMP_PATH, constants.S3_IMPORT_FILE) - -/** - * Gets the ETag of the current NALD zip in the S3 bucket - * This is a hash of the file contents which can be used - * to detect changes - * @return {Promise} - */ -const getEtag = async () => { - const result = await s3.getHead(s3Path) - return result.ETag.replace(/"/g, '') -} - -/** - * Downloads latest ZIP file from S3 bucket - * @return {Promise} resolves when download complete - */ -const download = async () => { - return s3.download(s3Path, localPath) -} - -module.exports = { - getEtag, - download -} diff --git a/src/modules/nald-import/services/schema-service.js b/src/modules/nald-import/services/schema-service.js deleted file mode 100644 index e0bac73e..00000000 --- a/src/modules/nald-import/services/schema-service.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const db = require('../lib/db') -const constants = require('../lib/constants') - -const dropSchema = name => db.dbQuery(`drop schema if exists ${name} cascade`) -const createSchema = name => db.dbQuery(`create schema if not exists ${name}`) -const renameSchema = (from, to) => db.dbQuery(`alter schema ${from} rename to ${to};`) - -/** - * Drops and creates the import schema ready to import the CSVs as tables - * @schemaName {String} The name of the schema to recreate. 
- * @return {Promise} - */ -async function dropAndCreateSchema (schemaName = constants.SCHEMA_IMPORT) { - await dropSchema(schemaName) - await createSchema(schemaName) -} - -const swapTemporarySchema = async () => { - await dropSchema(constants.SCHEMA_IMPORT) - await renameSchema(constants.SCHEMA_TEMP, constants.SCHEMA_IMPORT) -} - -module.exports = { - dropAndCreateSchema, - swapTemporarySchema, - renameSchema -} diff --git a/src/modules/nald-import/services/zip-service.js b/src/modules/nald-import/services/zip-service.js deleted file mode 100644 index 228ff543..00000000 --- a/src/modules/nald-import/services/zip-service.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict' - -const path = require('path') - -const processHelper = require('@envage/water-abstraction-helpers').process -const constants = require('../lib/constants') -const config = require('../../../../config') - -const primaryPath = path.join(constants.LOCAL_TEMP_PATH, constants.S3_IMPORT_FILE) -const secondaryPath = path.join(constants.LOCAL_TEMP_PATH, `${constants.CSV_DIRECTORY}.zip`) - -/** - * - * @param {String} source - file - * @param {String} destination - file - * @param {String} [password] - password if encrypted - */ -const extractArchive = async (source, destination, password) => { - let command = `7z x ${source} -o${destination}` - if (password) { - command += ` -p${password}` - } - await processHelper.execCommand(command) -} - -/** - * Extracts files from zip downloaded from S3 bucket - */ -const extract = async () => { - const zipPassword = config.import.nald.zipPassword - await extractArchive(primaryPath, constants.LOCAL_TEMP_PATH, zipPassword) - await extractArchive(secondaryPath, constants.LOCAL_TEMP_PATH) -} - -module.exports = { - extract -} diff --git a/src/modules/nald-import/transform-returns.js b/src/modules/nald-import/transform-returns.js deleted file mode 100644 index b2d59945..00000000 --- a/src/modules/nald-import/transform-returns.js +++ /dev/null @@ -1,104 +0,0 @@ -'use strict' - -const moment = require('moment') -const queries = require('./lib/nald-queries/returns') - -const helpers = require('./lib/transform-returns-helpers.js') - -const dueDate = require('./lib/due-date') - -const { getReturnId } = require('@envage/water-abstraction-helpers').returns - -/** - * Loads licence formats from DB - * @param {String} licenceNumber - * @return {Promise} resolves with array of formats - */ -const getLicenceFormats = async (licenceNumber) => { - const splitDate = await queries.getSplitDate(licenceNumber) - - const formats = await queries.getFormats(licenceNumber) - - // Load format data - for (const format of formats) { - format.purposes = await queries.getFormatPurposes(format.ID, format.FGAC_REGION_CODE) - format.points = await queries.getFormatPoints(format.ID, format.FGAC_REGION_CODE) - format.cycles = helpers.getFormatCycles(format, splitDate) - } - return formats -} - -const getCycleLogs = (logs, startDate, endDate) => { - return logs.filter(log => { - return ( - moment(log.DATE_TO, 'DD/MM/YYYY').isSameOrAfter(startDate) && - moment(log.DATE_FROM, 'DD/MM/YYYY').isSameOrBefore(endDate) - ) - }) -} - -/** - * @param {String} licenceNumber - the abstraction licence number - */ -const buildReturnsPacket = async (licenceNumber) => { - const formats = await getLicenceFormats(licenceNumber) - - const returnsData = { - returns: [] - } - - for (const format of formats) { - // Get all the logs for the format here and filter later by cycle. 
- // This saves having to make many requests to the database for - // each format cycle. - const logs = await queries.getLogs(format.ID, format.FGAC_REGION_CODE) - - for (const cycle of format.cycles) { - const { startDate, endDate, isCurrent } = cycle - - // Get all form logs relating to this cycle - const cycleLogs = getCycleLogs(logs, startDate, endDate) - - // Only create return cycles for formats with logs to allow NALD prepop to - // drive online returns - if (cycleLogs.length === 0) { - continue - } - - const returnId = getReturnId(format.FGAC_REGION_CODE, licenceNumber, format.ID, startDate, endDate) - const receivedDate = helpers.mapReceivedDate(cycleLogs) - const status = helpers.getStatus(receivedDate) - - // Create new return row - const returnRow = { - return_id: returnId, - regime: 'water', - licence_type: 'abstraction', - licence_ref: licenceNumber, - start_date: startDate, - end_date: endDate, - due_date: await dueDate.getDueDate(endDate, format), - returns_frequency: helpers.mapPeriod(format.ARTC_REC_FREQ_CODE), - status, - source: 'NALD', - metadata: JSON.stringify({ - ...helpers.formatReturnMetadata(format), - isCurrent, - isFinal: moment(endDate).isSame(helpers.getFormatEndDate(format), 'day') - }), - received_date: receivedDate, - return_requirement: format.ID - } - - returnsData.returns.push(returnRow) - } - } - - return returnsData -} - -module.exports = { - buildReturnsPacket, - getLicenceFormats, - getCycleLogs -} diff --git a/src/modules/nightly-import/controllers.js b/src/modules/nightly-import/controllers.js new file mode 100644 index 00000000..5bf1e9cd --- /dev/null +++ b/src/modules/nightly-import/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function nightlyImport (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + nightlyImport +} diff --git a/src/modules/nightly-import/process-steps.js b/src/modules/nightly-import/process-steps.js new file mode 100644 index 00000000..9fcdd60e --- /dev/null +++ b/src/modules/nightly-import/process-steps.js @@ -0,0 +1,241 @@ +'use strict' + +const CleanProcessSteps = require('../clean/process-steps.js') +const CompanyDetailsProcessSteps = require('../company-details/process-steps.js') +const ModLogsProcessSteps = require('../mod-logs/process-steps.js') +const NaldDataProcessSteps = require('../nald-data/process-steps.js') +const PermitProcessSteps = require('../permit/process-steps.js') +const ReturnVersionsProcessSteps = require('../return-versions/process-steps.js') +const LicenceDetailsProcessSteps = require('../licence-details/process-steps.js') +const TrackerProcessSteps = require('../tracker/process-steps.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + const allResults = _allResults() + + let emailMessage = 'nightly-import process' + + try { + global.GlobalNotifier.omg('nightly-import started') + + const startTime = currentTimeInNanoseconds() + + await _naldDataProcess(allResults) + await _cleanProcess(allResults) + await _companyDetailsProcess(allResults) + await _licenceDetailsProcess(allResults) + await _modLogsProcess(allResults) + await _returnVersionsProcess(allResults) + await _permitProcess(allResults) + + const logData = calculateAndLogTimeTaken(startTime, 'nightly-import complete') + const timeMessage = _timeMessage(logData.timeTakenSs) + emailMessage = _message(allResults, timeMessage) + } catch (error) { + 
global.GlobalNotifier.omfg('nightly-import errored') + emailMessage = `Nightly import process errored: ${error.message}` + } + + await _trackerProcess(emailMessage) +} + +function _allResults () { + return { + naldData: { + title: 'NALD data process', + description: 'Download the NALD zip file, extract the .txt files, then import into DB ready for processing.', + attempted: false, + completed: false + }, + clean: { + title: 'Clean process', + description: 'Marks deleted any crm.document_headers, crm_v2.documents with no matching licence in NALD, then hard deletes any return requirements with no match in NALD and no return log in WRLS.', + attempted: false, + completed: false + }, + companyDetails: { + title: 'Company details process', + description: "Imports NALD 'party' data as companies, contacts and addresses into crm_v2.", + attempted: false, + completed: false + }, + licenceDetails: { + title: 'Licence details process', + description: 'Imports NALD licence data as the new version of the licence: water.licence and all child records.', + attempted: false, + completed: false + }, + modLogs: { + title: 'Mod logs process', + description: 'Imports NALD mod log data, links it to existing records, and where possible maps NALD reasons to WRLS ones.', + attempted: false, + completed: false + }, + returnVersions: { + title: 'Return versions process', + description: 'Imports NALD return version data, then corrects known issues with it, ready for use in WRLS.', + attempted: false, + completed: false + }, + permit: { + title: 'Permit process', + description: 'Imports NALD data as the old versions of the licence: permit.licence and crm.document_header.', + attempted: false, + completed: false + } + } +} + +async function _cleanProcess (allResults) { + try { + const { clean, naldData } = allResults + + if (!naldData.completed) { + return + } + + clean.attempted = true + clean.completed = await CleanProcessSteps.go() + } catch (error) { + global.GlobalNotifier.oops('nightly-import clean-process failed') + } +} + +async function _companyDetailsProcess (allResults) { + try { + const { clean, companyDetails } = allResults + + if (!clean.completed) { + return + } + + companyDetails.attempted = true + + const { counts, processComplete } = await CompanyDetailsProcessSteps.go() + + companyDetails.completed = processComplete + companyDetails.counts = counts + } catch (error) { + global.GlobalNotifier.oops('nightly-import company-details process failed') + } +} + +async function _licenceDetailsProcess (allResults) { + try { + const { companyDetails, licenceDetails } = allResults + + if (!companyDetails.completed) { + return + } + + licenceDetails.attempted = true + + const { counts, processComplete } = await LicenceDetailsProcessSteps.go() + + licenceDetails.completed = processComplete + licenceDetails.counts = counts + } catch (error) { + global.GlobalNotifier.oops('nightly-import licence-details process failed') + } +} + +function _message (allResults, timeMessage) { + const messages = [] + + Object.values(allResults).forEach((result) => { + const { attempted, completed, description, title } = result + let message = `${title}\n${description}\nAttempted: ${attempted}\nCompleted: ${completed}` + + if (result.counts) { + message = `${message} (${result.counts.rejected} of ${result.counts.count} rejected)` + } + + messages.push(message) + }) + + messages.push(timeMessage) + + return messages.join('\n\n') +} + +async function _modLogsProcess (allResults) { + try { + const { licenceDetails, modLogs } = allResults 
+ + if (!licenceDetails.completed) { + return + } + + modLogs.attempted = true + modLogs.completed = await ModLogsProcessSteps.go() + } catch (error) { + global.GlobalNotifier.oops('nightly-import mod-logs process failed') + } +} + +async function _naldDataProcess (allResults) { + try { + const { naldData } = allResults + + naldData.attempted = true + naldData.completed = await NaldDataProcessSteps.go() + } catch (error) { + global.GlobalNotifier.oops('nightly-import nald-data failed') + } +} + +async function _permitProcess (allResults) { + try { + const { permit, returnVersions } = allResults + + if (!returnVersions.completed) { + return + } + + permit.attempted = true + + const { counts, processComplete } = await PermitProcessSteps.go() + + permit.completed = processComplete + permit.counts = counts + } catch (error) { + global.GlobalNotifier.oops('nightly-import permit process failed') + } +} + +async function _returnVersionsProcess (allResults) { + try { + const { modLogs, returnVersions } = allResults + + if (!modLogs.completed) { + return + } + + returnVersions.attempted = true + returnVersions.completed = await ReturnVersionsProcessSteps.go() + } catch (error) { + global.GlobalNotifier.oops('nightly-import return-versions process failed') + } +} + +function _timeMessage (secondsAsBigInt) { + const seconds = Number(secondsAsBigInt) + const hours = Math.floor(seconds / 3600) + const minutes = Math.floor((seconds % 3600) / 60) + const remainingSeconds = seconds % 60 + + return `Time taken: ${hours} hours, ${minutes} minutes, and ${remainingSeconds} seconds` +} + +async function _trackerProcess (emailMessage) { + try { + await TrackerProcessSteps.go(emailMessage) + } catch (error) { + global.GlobalNotifier.oops('nightly-import tracker process failed') + } +} + +module.exports = { + go +} diff --git a/src/modules/nightly-import/routes.js b/src/modules/nightly-import/routes.js new file mode 100644 index 00000000..dc375f62 --- /dev/null +++ b/src/modules/nightly-import/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers.js') + +const routes = [ + { + method: 'post', + handler: controllers.nightlyImport, + path: '/nightly-import' + } +] + +module.exports = routes diff --git a/src/modules/permit/controllers.js b/src/modules/permit/controllers.js new file mode 100644 index 00000000..68fd7eee --- /dev/null +++ b/src/modules/permit/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function permit (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + permit +} diff --git a/src/modules/permit/lib/address-queries.js b/src/modules/permit/lib/address-queries.js new file mode 100644 index 00000000..f709b8e6 --- /dev/null +++ b/src/modules/permit/lib/address-queries.js @@ -0,0 +1,13 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function getAddress (addressId, regionCode) { + const query = 'SELECT a.* FROM "import"."NALD_ADDRESSES" a WHERE "ID"=$1 AND "FGAC_REGION_CODE" = $2;' + + return db.query(query, [addressId, regionCode]) +} + +module.exports = { + getAddress +} diff --git a/src/modules/permit/lib/cam-queries.js b/src/modules/permit/lib/cam-queries.js new file mode 100644 index 00000000..944fdebd --- /dev/null +++ b/src/modules/permit/lib/cam-queries.js @@ -0,0 +1,13 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function getCams (code, regionCode) { + const query = 'SELECT * FROM 
"import"."NALD_REP_UNITS" WHERE "CODE" = $1 AND "FGAC_REGION_CODE" = $2;' + + return db.query(query, [code, regionCode]) +} + +module.exports = { + getCams +} diff --git a/src/modules/permit/lib/licence-queries.js b/src/modules/permit/lib/licence-queries.js new file mode 100644 index 00000000..f3e0c8c1 --- /dev/null +++ b/src/modules/permit/lib/licence-queries.js @@ -0,0 +1,97 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function getLicence (licenceNo) { + const query = ` + SELECT + l.*, + to_date(nullif(l."ORIG_EFF_DATE", 'null'), 'DD/MM/YYYY') AS start_date, + LEAST( + to_date(nullif(l."EXPIRY_DATE", 'null'), 'DD/MM/YYYY'), + to_date(nullif(l."REV_DATE", 'null'), 'DD/MM/YYYY'), + to_date(nullif(l."LAPSED_DATE", 'null'), 'DD/MM/YYYY') + ) AS end_date + FROM + "import"."NALD_ABS_LICENCES" l + WHERE + l."LIC_NO" = $1; + ` + + return db.query(query, [licenceNo]) +} + +async function getCurrentVersion (licenceId, regionCode) { + const query = ` + SELECT v.*, t.* + FROM import."NALD_ABS_LIC_VERSIONS" v + JOIN import."NALD_WA_LIC_TYPES" t + ON v."WA_ALTY_CODE" = t."CODE" + JOIN import."NALD_ABS_LICENCES" l + ON v."AABL_ID" = l."ID" + AND l."FGAC_REGION_CODE" = v."FGAC_REGION_CODE" + WHERE "AABL_ID" = $1 + AND v."FGAC_REGION_CODE" = $2 + AND ( + 0 = 0 + AND "EFF_END_DATE" = 'null' + OR to_date( "EFF_END_DATE", 'DD/MM/YYYY' ) > now() + ) + AND ( + 0 = 0 + AND v."STATUS" = 'CURR' + AND ( + l."EXPIRY_DATE" = 'null' + OR to_date(l."EXPIRY_DATE", 'DD/MM/YYYY') > NOW() + ) + AND ( + l."LAPSED_DATE" = 'null' OR to_date(l."LAPSED_DATE", 'DD/MM/YYYY') > NOW() + ) + AND ( + l."REV_DATE" = 'null' OR to_date(l."REV_DATE", 'DD/MM/YYYY') > NOW() + ) + AND ( + v."EFF_ST_DATE"='null' OR to_date(v."EFF_ST_DATE", 'DD/MM/YYYY') <= NOW() + ) + ) + ORDER BY "ISSUE_NO" DESC, "INCR_NO" DESC + LIMIT 1; + ` + + const rows = await db.query(query, [licenceId, regionCode]) + + return rows.length ? 
rows[0] : null +} + +async function getVersions (licenceId, regionCode) { + const query = 'SELECT * FROM "import"."NALD_ABS_LIC_VERSIONS" WHERE "AABL_ID" = $1 AND "FGAC_REGION_CODE" = $2;' + + return db.query(query, [licenceId, regionCode]) +} + +async function getCurrentFormats (licenceId, regionCode) { + const query = ` + SELECT + f.* + FROM + "import"."NALD_RET_VERSIONS" rv + JOIN + "import"."NALD_RET_FORMATS" f + ON f."ARVN_AABL_ID" = $1 + AND f."FGAC_REGION_CODE" = $2 + AND rv."VERS_NO" = f."ARVN_VERS_NO" + WHERE + rv."AABL_ID" = $1 + AND rv."FGAC_REGION_CODE" = $2 + AND rv."STATUS" = 'CURR'; + ` + + return db.query(query, [licenceId, regionCode]) +} + +module.exports = { + getLicence, + getCurrentVersion, + getVersions, + getCurrentFormats +} diff --git a/src/modules/permit/lib/loader.js b/src/modules/permit/lib/loader.js new file mode 100644 index 00000000..571a1680 --- /dev/null +++ b/src/modules/permit/lib/loader.js @@ -0,0 +1,113 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { generateUUID } = require('../../../lib/general.js') +const { buildCRMPacket } = require('./transform-crm') +const { getLicenceJson, buildPermitRepoPacket } = require('./transform-permit') + +async function load (licenceNumber) { + try { + const licenceData = await getLicenceJson(licenceNumber) + + if (licenceData.data.versions.length === 0) { + return null + } + + const permitRepoId = await _loadPermit(licenceNumber, licenceData) + + return _loadDocumentHeader(licenceNumber, licenceData, permitRepoId) + } catch (error) { + global.GlobalNotifier.omfg('permit.import errored', error, { licenceNumber }) + throw error + } +} + +async function _loadDocumentHeader (licenceNumber, licenceData, permitRepoId) { + const crmData = buildCRMPacket(licenceData, licenceNumber, permitRepoId) + + return _persistDocumentHeader(crmData) +} + +async function _loadPermit (licenceNumber, licenceData) { + const permit = buildPermitRepoPacket(licenceNumber, 1, 8, licenceData) + + const results = await _persistPermit(permit) + + return results[0].licence_id +} + +async function _persistDocumentHeader (documentHeader) { + const params = [ + generateUUID(), + documentHeader.regime_entity_id, + documentHeader.system_id, + documentHeader.system_internal_id, + documentHeader.system_external_id, + documentHeader.metadata + ] + + const query = ` + INSERT INTO crm.document_header ( + document_id, + regime_entity_id, + system_id, + system_internal_id, + system_external_id, + metadata + ) + VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT ( + system_id, + system_internal_id, + regime_entity_id + ) + DO UPDATE SET + system_external_id = EXCLUDED.system_external_id, + metadata = EXCLUDED.metadata; + ` + + return db.query(query, params) +} + +async function _persistPermit (permit) { + const params = [ + permit.licence_ref, + permit.licence_start_dt, + permit.licence_end_dt, + permit.licence_status_id, + permit.licence_type_id, + permit.licence_regime_id, + permit.licence_data_value + ] + + const query = ` + INSERT INTO permit.licence ( + licence_ref, + licence_start_dt, + licence_end_dt, + licence_status_id, + licence_type_id, + licence_regime_id, + licence_data_value + ) + VALUES ($1, $2, $3, $4, $5, $6, $7) + ON CONFLICT ( + licence_regime_id, + licence_type_id, + licence_ref + ) DO UPDATE SET + licence_status_id = EXCLUDED.licence_status_id, + licence_search_key = EXCLUDED.licence_search_key, + is_public_domain = EXCLUDED.is_public_domain, + licence_start_dt = EXCLUDED.licence_start_dt, + licence_end_dt = 
EXCLUDED.licence_end_dt, + licence_data_value = EXCLUDED.licence_data_value + RETURNING licence_id; + ` + + return db.query(query, params) +} + +module.exports = { + load +} diff --git a/src/modules/permit/lib/party-queries.js b/src/modules/permit/lib/party-queries.js new file mode 100644 index 00000000..169d7f06 --- /dev/null +++ b/src/modules/permit/lib/party-queries.js @@ -0,0 +1,39 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function getParties (partyId, regionCode) { + const query = 'SELECT p.* from "import"."NALD_PARTIES" p WHERE p."ID" = $1 AND "FGAC_REGION_CODE" = $2;' + + return db.query(query, [partyId, regionCode]) +} + +async function getPartyContacts (partyId, regionCode) { + const query = ` + SELECT + c.*, row_to_json(a.*) AS party_address + FROM + "import"."NALD_CONTACTS" c + LEFT JOIN + "import"."NALD_ADDRESSES" a + ON a."ID" = c."AADD_ID" + WHERE + c."APAR_ID" = $1 + AND c."FGAC_REGION_CODE" = $2 + AND a."FGAC_REGION_CODE" = $2; + ` + + return db.query(query, [partyId, regionCode]) +} + +async function getParty (partyId, regionCode) { + const query = 'SELECT p.* FROM "import"."NALD_PARTIES" p WHERE "ID" = $1 AND "FGAC_REGION_CODE" = $2;' + + return db.query(query, [partyId, regionCode]) +} + +module.exports = { + getParties, + getParty, + getPartyContacts +} diff --git a/src/modules/permit/lib/purpose-queries.js b/src/modules/permit/lib/purpose-queries.js new file mode 100644 index 00000000..e9c7d507 --- /dev/null +++ b/src/modules/permit/lib/purpose-queries.js @@ -0,0 +1,97 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function getPurposes (licenceId, regionCode, issueNo, incrementNo) { + let params = [licenceId, regionCode] + let query = 'SELECT * FROM "import"."NALD_ABS_LIC_PURPOSES" WHERE "AABV_AABL_ID" = $1 AND "FGAC_REGION_CODE" = $2;' + + if (issueNo && incrementNo) { + query = ` + SELECT + * + FROM + "import"."NALD_ABS_LIC_PURPOSES" + WHERE + "AABV_AABL_ID" = $1 + AND "FGAC_REGION_CODE" = $2 + AND "AABV_ISSUE_NO" = $3 + AND "AABV_INCR_NO" = $4; + ` + params = [licenceId, regionCode, issueNo, incrementNo] + } + + return db.query(query, params) +} + +async function getPurposePoints (purposeId, regionCode) { + const query = ` + SELECT + pp.*, + row_to_json(m.*) AS means_of_abstraction, + row_to_json(p.*) AS point_detail, + row_to_json(s.*) AS point_source + FROM + import."NALD_ABS_PURP_POINTS" pp + LEFT JOIN "import"."NALD_MEANS_OF_ABS" m ON m."CODE" = pp."AMOA_CODE" + LEFT JOIN "import"."NALD_POINTS" p ON p."ID" = pp."AAIP_ID" + LEFT JOIN "import"."NALD_SOURCES" s ON s."CODE" = p."ASRC_CODE" + WHERE + pp."AABP_ID" = $1 + AND pp."FGAC_REGION_CODE" = $2 + AND p."FGAC_REGION_CODE" = $2; + ` + + return db.query(query, [purposeId, regionCode]) +} + +async function getPurpose (purpose) { + const { primary, secondary, tertiary } = purpose + const query = ` + SELECT + row_to_json(p1.*) AS purpose_primary, + row_to_json(p2.*) AS purpose_secondary, + row_to_json(p3.*) AS purpose_tertiary + FROM + "import"."NALD_PURP_PRIMS" p1 + LEFT JOIN "import"."NALD_PURP_SECS" p2 ON p2."CODE" = $2 + LEFT JOIN "import"."NALD_PURP_USES" p3 ON p3."CODE" = $3 + WHERE + p1."CODE" = $1; + ` + + return db.query(query, [primary, secondary, tertiary]) +} + +async function getPurposePointLicenceAgreements (licenceId, regionCode) { + const query = 'SELECT * FROM "import"."NALD_LIC_AGRMNTS" WHERE "AABP_ID" = $1 AND "FGAC_REGION_CODE" = $2;' + + return db.query(query, [licenceId, regionCode]) +} + +const 
getPurposePointLicenceConditions = async (licenceId, regionCode) => { + const query = ` + SELECT + c.*, row_to_json(ct.*) AS condition_type + FROM + "import"."NALD_LIC_CONDITIONS" c + LEFT JOIN "import"."NALD_LIC_COND_TYPES" ct + ON ct."CODE" = c."ACIN_CODE" + AND ct."SUBCODE" = c."ACIN_SUBCODE" + where + c."AABP_ID" = $1 + AND c."FGAC_REGION_CODE" = $2 + order by + "DISP_ORD" asc; + ` + + return db.query(query, [licenceId, regionCode]) +} + +module.exports = { + getPurpose, + getPurposePointLicenceAgreements, + getPurposePointLicenceConditions, + getPurposePoints, + getPurposes +} diff --git a/src/modules/permit/lib/return-queries.js b/src/modules/permit/lib/return-queries.js new file mode 100644 index 00000000..f09cb646 --- /dev/null +++ b/src/modules/permit/lib/return-queries.js @@ -0,0 +1,47 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function getFormatPoints (formatId, regionCode) { + const query = ` + SELECT + p.* + FROM + "import"."NALD_RET_FMT_POINTS" fp + LEFT JOIN "import"."NALD_POINTS" p + ON fp."AAIP_ID" = p."ID" + AND fp."FGAC_REGION_CODE" = p."FGAC_REGION_CODE" + WHERE + fp."ARTY_ID" = $1 + AND fp."FGAC_REGION_CODE" = $2; + ` + + return db.query(query, [formatId, regionCode]) +} + +async function getFormatPurposes (formatId, regionCode) { + const query = ` + SELECT p.*, + p1."DESCR" AS primary_purpose, + p2."DESCR" AS secondary_purpose, + p3."DESCR" AS tertiary_purpose + FROM + "import"."NALD_RET_FMT_PURPOSES" p + LEFT JOIN "import"."NALD_PURP_PRIMS" p1 + ON p."APUR_APPR_CODE" = p1."CODE" + LEFT JOIN "import"."NALD_PURP_SECS" p2 + ON p."APUR_APSE_CODE" = p2."CODE" + LEFT JOIN "import"."NALD_PURP_USES" p3 + ON p."APUR_APUS_CODE" = p3."CODE" + WHERE + p."ARTY_ID" = $1 + AND p."FGAC_REGION_CODE" = $2; + ` + + return db.query(query, [formatId, regionCode]) +} + +module.exports = { + getFormatPurposes, + getFormatPoints +} diff --git a/src/modules/permit/lib/role-queries.js b/src/modules/permit/lib/role-queries.js new file mode 100644 index 00000000..b29417a9 --- /dev/null +++ b/src/modules/permit/lib/role-queries.js @@ -0,0 +1,46 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') + +async function getRoles (licenceId, regionCode) { + const query = ` + SELECT + row_to_json(r.*) AS role_detail, + row_to_json(t.*) AS role_type, + row_to_json(p.*) AS role_party, + row_to_json(a.*) AS role_address, + array( + SELECT + row_to_json(x.*) AS contact_data + FROM ( + SELECT + * + FROM + "import"."NALD_CONT_NOS" c + LEFT JOIN "import"."NALD_CONT_NO_TYPES" ct + ON c."ACNT_CODE" = ct."CODE" + WHERE + r."ACON_APAR_ID" = c."ACON_APAR_ID" + AND r."ACON_AADD_ID" = c."ACON_AADD_ID" + AND c."FGAC_REGION_CODE" = $2 + ) x + ) + FROM + "import"."NALD_LIC_ROLES" r + LEFT JOIN "import"."NALD_LIC_ROLE_TYPES" t ON r."ALRT_CODE" = t."CODE" + LEFT JOIN "import"."NALD_PARTIES" p ON r."ACON_APAR_ID" = p."ID" + LEFT JOIN "import"."NALD_ADDRESSES" a ON r."ACON_AADD_ID" = a."ID" + WHERE + r."AABL_ID"=$1 + AND r."FGAC_REGION_CODE" = $2 + AND p."FGAC_REGION_CODE" = $2 + AND a."FGAC_REGION_CODE" = $2 + AND (r."EFF_END_DATE" = 'null' OR to_date(r."EFF_END_DATE", 'DD/MM/YYYY') > now()); + ` + + return db.query(query, [licenceId, regionCode]) +} + +module.exports = { + getRoles +} diff --git a/src/modules/nald-import/transform-crm.js b/src/modules/permit/lib/transform-crm.js similarity index 85% rename from src/modules/nald-import/transform-crm.js rename to src/modules/permit/lib/transform-crm.js index 4dac57db..3cd3745a 100644 --- 
a/src/modules/nald-import/transform-crm.js +++ b/src/modules/permit/lib/transform-crm.js @@ -1,61 +1,30 @@ -/** - * Transform data for loading into CRM - */ +'use strict' const { findCurrent, transformNull } = require('@envage/water-abstraction-helpers').nald const { addressFormatter, crmNameFormatter } = require('@envage/water-abstraction-helpers').nald.formatting const { sentenceCase } = require('sentence-case') /** - * Contacts formatter - * Creates a list of contacts from the roles/parties in the NALD data - * @param {Object} currentVersion - note must be from versions array not current_version - * @param {Array} roles - * @return {Array} formatted contacts + * Builds CRM packet ready for posting to CRM + * @param {Object} licenceData - permit repo licence data + * @param {String} licenceRef - the licence number + * @param {Number} licenceId - the permit repo licence ID + * @return {Object} - object containing of row of data for CRM */ -const contactsFormatter = (currentVersion, roles) => { - if (!currentVersion) { - return [] +function buildCRMPacket (licenceData, licenceRef, licenceId) { + const crmData = { + regime_entity_id: '0434dc31-a34e-7158-5775-4694af7a60cf', + system_id: 'permit-repo', + system_internal_id: licenceId, + system_external_id: licenceRef } - const licenceHolderParty = currentVersion.parties.find((party) => { - return party.ID === currentVersion.ACON_APAR_ID - }) - - const licenceHolderAddress = licenceHolderParty.contacts.find((contact) => { - return contact.AADD_ID === currentVersion.ACON_AADD_ID - }) - - const contacts = [{ - role: 'Licence holder', - ...crmNameFormatter(licenceHolderParty), - ...addressFormatter(licenceHolderAddress.party_address) - }] - - roles.forEach((role) => { - contacts.push({ - role: sentenceCase(role.role_type.DESCR), - ...crmNameFormatter(role.role_party), - ...addressFormatter(role.role_address) - }) - }) - - return transformNull(contacts) -} - -/** - * Data from NALD import has null as "null" string - * prune this to empty value - */ -function pruneNullString (data) { - const mappedValues = {} - for (const key in data) { - if (data[key] === 'null') { - mappedValues[key] = '' - } else mappedValues[key] = data[key] - } + const currentVersion = licenceData.data.current_version + const metadata = _buildCRMMetadata(currentVersion) + metadata.contacts = _contactsFormatter(findCurrent(licenceData.data.versions), licenceData.data.roles) + crmData.metadata = JSON.stringify(metadata) - return mappedValues + return crmData } /** @@ -63,7 +32,7 @@ function pruneNullString (data) { * @param {Object} currentVersion * @return {Object} contact metadata */ -function buildCRMContactMetadata (currentVersion) { +function _buildCRMContactMetadata (currentVersion) { const party = currentVersion.party const address = currentVersion.address return { @@ -87,7 +56,7 @@ function buildCRMContactMetadata (currentVersion) { * @param {Object} currentVersion * @return {Object} CRM metadata object */ -function buildCRMMetadata (currentVersion) { +function _buildCRMMetadata (currentVersion) { if (!currentVersion) { return { IsCurrent: false @@ -95,7 +64,7 @@ function buildCRMMetadata (currentVersion) { } const expires = currentVersion.expiry_date const modified = currentVersion.version_effective_date - const contact = buildCRMContactMetadata(currentVersion) + const contact = _buildCRMContactMetadata(currentVersion) const data = { ...contact, @@ -103,34 +72,61 @@ function buildCRMMetadata (currentVersion) { Modified: modified, IsCurrent: true } - return 
pruneNullString(data) + return _pruneNullString(data) } /** - * Builds CRM packet ready for posting to CRM - * @param {Object} licenceData - permit repo licence data - * @param {String} licenceRef - the licence number - * @param {Number} licenceId - the permit repo licence ID - * @return {Object} - object containing of row of data for CRM + * Contacts formatter + * Creates a list of contacts from the roles/parties in the NALD data + * @param {Object} currentVersion - note must be from versions array not current_version + * @param {Array} roles + * @return {Array} formatted contacts */ -function buildCRMPacket (licenceData, licenceRef, licenceId) { - const crmData = { - regime_entity_id: '0434dc31-a34e-7158-5775-4694af7a60cf', - system_id: 'permit-repo', - system_internal_id: licenceId, - system_external_id: licenceRef +function _contactsFormatter (currentVersion, roles) { + if (!currentVersion) { + return [] } - const currentVersion = licenceData.data.current_version - const metadata = buildCRMMetadata(currentVersion) - metadata.contacts = contactsFormatter(findCurrent(licenceData.data.versions), licenceData.data.roles) - crmData.metadata = JSON.stringify(metadata) + const licenceHolderParty = currentVersion.parties.find((party) => { + return party.ID === currentVersion.ACON_APAR_ID + }) - return crmData + const licenceHolderAddress = licenceHolderParty.contacts.find((contact) => { + return contact.AADD_ID === currentVersion.ACON_AADD_ID + }) + + const contacts = [{ + role: 'Licence holder', + ...crmNameFormatter(licenceHolderParty), + ...addressFormatter(licenceHolderAddress.party_address) + }] + + roles.forEach((role) => { + contacts.push({ + role: sentenceCase(role.role_type.DESCR), + ...crmNameFormatter(role.role_party), + ...addressFormatter(role.role_address) + }) + }) + + return transformNull(contacts) +} + +/** + * Data from NALD import has null as "null" string + * prune this to empty value + */ +function _pruneNullString (data) { + const mappedValues = {} + for (const key in data) { + if (data[key] === 'null') { + mappedValues[key] = '' + } else mappedValues[key] = data[key] + } + + return mappedValues } module.exports = { - buildCRMPacket, - buildCRMMetadata, - contactsFormatter + buildCRMPacket } diff --git a/src/modules/permit/lib/transform-permit.js b/src/modules/permit/lib/transform-permit.js new file mode 100644 index 00000000..73934b9e --- /dev/null +++ b/src/modules/permit/lib/transform-permit.js @@ -0,0 +1,217 @@ +'use strict' + +const NaldDates = require('@envage/water-abstraction-helpers').nald.dates +const moment = require('moment') + +const AddressQueries = require('./address-queries.js') +const CamQueries = require('./cam-queries.js') +const LicenceQueries = require('./licence-queries.js') +const PartyQueries = require('./party-queries.js') +const PurposeQueries = require('./purpose-queries.js') +const ReturnQueries = require('./return-queries.js') +const RoleQueries = require('./role-queries.js') + +/** + * Build packet of data to post to permit repository + * @param {String} licenceRef - the licence number + * @param {Number} regimeId - the numeric ID of the permitting regime + * @param {Number} licenceTypeId - the ID of the licence type, e.g abstraction, impoundment etc + * @param {Object} data - the licence JS object data + * @return {Object} - packet of data for posting to permit repo + */ +function buildPermitRepoPacket (licenceRef, regimeId, licenceTypeId, data) { + const latestVersion = _latestVersion(data.data.versions) + const permitRepoData = { + 
licence_ref: licenceRef, + licence_start_dt: NaldDates.calendarToSortable(latestVersion.EFF_ST_DATE), + licence_end_dt: _endDate(data), + licence_status_id: '1', + licence_type_id: licenceTypeId, + licence_regime_id: regimeId, + licence_data_value: JSON.stringify(data) + } + + // remove null attributes so as not to anger JOI + if (permitRepoData.licence_end_dt == null) { + delete permitRepoData.licence_end_dt + } + + if (permitRepoData.licence_start_dt == null) { + delete permitRepoData.licence_start_dt + } + return permitRepoData +} + +/** + * Build full licence JSON for storing in permit repo from NALD import tables + * @param {String} licenceNumber + * @return {Promise} resolves with permit repo JSON packet + */ +async function getLicenceJson (licenceNumber) { + const data = await LicenceQueries.getLicence(licenceNumber) + const licenceRow = data[0] + + licenceRow.vmlVersion = 2 + licenceRow.data = {} + licenceRow.data.versions = await _versionsJson(licenceRow) + licenceRow.data.current_version = await _currentVersionJson(licenceRow) + licenceRow.data.cams = await CamQueries.getCams(licenceRow.AREP_CAMS_CODE, licenceRow.FGAC_REGION_CODE) + licenceRow.data.roles = await RoleQueries.getRoles(licenceRow.ID, licenceRow.FGAC_REGION_CODE) + licenceRow.data.purposes = await _purposesJson(licenceRow) + + return licenceRow +} + +function _calculateVersionScore (version) { + // We * it by 1000 so ISSUE_NO goes to the top + const issueNo = 1000 * parseInt(version.ISSUE_NO, 10) + const incrNo = parseInt(version.INCR_NO, 10) + return issueNo + incrNo +} + +/** + * Gets the JSON for the current version of the licence (if available) + * @param {Object} licenceRow + * return {Promise} resolves with object of current version, or null + */ +async function _currentVersionJson (licenceRow) { + const regionCode = licenceRow.FGAC_REGION_CODE + const currentVersion = await LicenceQueries.getCurrentVersion(licenceRow.ID, regionCode) + + if (currentVersion) { + const data = { + licence: currentVersion + } + + data.licence.party = await PartyQueries.getParties(currentVersion.ACON_APAR_ID, regionCode) + + for (const p in data.licence.parties) { + data.licence.parties[p].contacts = await PartyQueries.getPartyContacts(currentVersion.parties[p].ID, regionCode) + } + data.party = (await PartyQueries.getParty(currentVersion.ACON_APAR_ID, regionCode))[0] + data.address = (await AddressQueries.getAddress(currentVersion.ACON_AADD_ID, regionCode))[0] + data.original_effective_date = NaldDates.calendarToSortable(licenceRow.ORIG_EFF_DATE) + data.version_effective_date = NaldDates.calendarToSortable(currentVersion.EFF_ST_DATE) + data.expiry_date = NaldDates.calendarToSortable(licenceRow.EXPIRY_DATE) + + data.purposes = await _purposesJson(licenceRow, currentVersion) + data.formats = await _returnFormats(licenceRow.ID, regionCode) + + return data + } + + return null +} + +/** + * End date is the minimum of expiry date, revoked date and lapsed date + * @param {Object} data - licence data + * @return {String} date YYYY-MM-DD or null + */ +function _endDate (data = {}) { + const dates = [ + data.EXPIRY_DATE, + data.REV_DATE, + data.LAPSED_DATE + ] + + const filteredDates = dates.filter((naldDate) => { + return moment(naldDate, 'DD/MM/YYYY').isValid() + }) + + const formattedFilteredDates = filteredDates.map((filteredDate) => { + return moment(filteredDate, 'DD/MM/YYYY').format('YYYY-MM-DD') + }) + + const sortedFormattedFilteredDates = formattedFilteredDates.sort() + + return sortedFormattedFilteredDates[0] +} + +/** + * Gets 
the latest version of the specified licence data + * by sorting on the effective start date of the versions array + * - Note: this may not be the current version + * @param {Array} versions - licence data versions array from NALD import process + * @return {Object} latest version + */ +function _latestVersion (versions) { + versions.sort((version1, version2) => { + const total1 = _calculateVersionScore(version1) + const total2 = _calculateVersionScore(version2) + + if (total1 < total2) return 1 + if (total1 > total2) return -1 + return 0 + }) + + return versions[0] +} + +/** + * Gets the purposes together with their points, agreements and conditions + * for the specified current version + * @param {Object} licenceRow + * @param {Object} [currentVersion] - optional current version + * @return {Promise} + */ +async function _purposesJson (licenceRow, currentVersion = null) { + const regionCode = licenceRow.FGAC_REGION_CODE + let purposes + if (currentVersion) { + purposes = await PurposeQueries.getPurposes(licenceRow.ID, regionCode, currentVersion.ISSUE_NO, currentVersion.INCR_NO) + } else { + purposes = await PurposeQueries.getPurposes(licenceRow.ID, regionCode) + } + + for (const purpose of purposes) { + purpose.purpose = await PurposeQueries.getPurpose({ + primary: purpose.APUR_APPR_CODE, + secondary: purpose.APUR_APSE_CODE, + tertiary: purpose.APUR_APUS_CODE + }) + purpose.purposePoints = await PurposeQueries.getPurposePoints(purpose.ID, regionCode) + purpose.licenceAgreements = await PurposeQueries.getPurposePointLicenceAgreements(purpose.ID, regionCode) + purpose.licenceConditions = await PurposeQueries.getPurposePointLicenceConditions(purpose.ID, regionCode) + } + return purposes +} + +/** + * Gets current return formats for specified licence ID and region code + * @param {Number} licenceId - from NALD_ABS_LICENCES table + * @param {Number} regionCode - FGAC_REGION_CODE + * @return {Promise} resolves with formats and purposes/points + */ +async function _returnFormats (licenceId, regionCode) { + const formats = await LicenceQueries.getCurrentFormats(licenceId, regionCode) + + for (const format of formats) { + format.points = await ReturnQueries.getFormatPoints(format.ID, regionCode) + format.purposes = await ReturnQueries.getFormatPurposes(format.ID, regionCode) + } + + return formats +} + +/** + * Gets all licence versions (including current) + * @param {Object} licenceRow + * @return {Promise} resolves with versions array + */ +async function _versionsJson (licenceRow) { + const versions = await LicenceQueries.getVersions(licenceRow.ID, licenceRow.FGAC_REGION_CODE) + + for (const version of versions) { + version.parties = await PartyQueries.getParties(version.ACON_APAR_ID, licenceRow.FGAC_REGION_CODE) + for (const party of version.parties) { + party.contacts = await PartyQueries.getPartyContacts(party.ID, licenceRow.FGAC_REGION_CODE) + } + } + return versions +} + +module.exports = { + buildPermitRepoPacket, + getLicenceJson +} diff --git a/src/modules/permit/process-steps.js b/src/modules/permit/process-steps.js new file mode 100644 index 00000000..9b7511d4 --- /dev/null +++ b/src/modules/permit/process-steps.js @@ -0,0 +1,30 @@ +'use strict' + +const ImportStep = require('./steps/import.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + let counts = {} + + try { + global.GlobalNotifier.omg('permit started') + + const startTime = currentTimeInNanoseconds() + + counts = 
await ImportStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'permit complete') + } catch (error) { + global.GlobalNotifier.oops('permit failed') + } + + return { processComplete, counts } +} + +module.exports = { + go +} diff --git a/src/modules/permit/routes.js b/src/modules/permit/routes.js new file mode 100644 index 00000000..7ec7d08b --- /dev/null +++ b/src/modules/permit/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers.js') + +const routes = [ + { + method: 'post', + handler: controllers.permit, + path: '/permit' + } +] + +module.exports = routes diff --git a/src/modules/permit/steps/import.js b/src/modules/permit/steps/import.js new file mode 100644 index 00000000..5a4b83d4 --- /dev/null +++ b/src/modules/permit/steps/import.js @@ -0,0 +1,73 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const Loader = require('../lib/loader.js') + +const config = require('../../../../config.js') + +const PROGRESS_TICK = 1000 + +async function go () { + let count = 0 + let rejected = 0 + + try { + global.GlobalNotifier.omg('permit.import started') + + const startTime = currentTimeInNanoseconds() + + const licenceReferences = await _licenceReferences() + count = licenceReferences.length + + rejected = await _import(licenceReferences, count) + + calculateAndLogTimeTaken(startTime, 'permit.import complete', { count, rejected }) + } catch (error) { + global.GlobalNotifier.omfg('permit.import errored', error, { count, rejected }) + throw error + } + + return { count, rejected } +} + +async function _import (licenceReferences, count) { + const batchSize = config.processBatchSize + + let progress = PROGRESS_TICK + let rejected = 0 + + for (let i = 0; i < count; i += batchSize) { + if (i === progress) { + progress = progress + PROGRESS_TICK + global.GlobalNotifier.omg(`permit.import progress (${i} of ${count})`) + } + + const referenceToProcess = licenceReferences.slice(i, i + batchSize) + + const processes = referenceToProcess.map((referenceToProcess) => { + return Loader.load(referenceToProcess.LIC_NO) + }) + + const results = await Promise.allSettled(processes) + const rejectedResults = results.filter((result) => { + return result.status === 'rejected' + }) + + if (rejectedResults.length === batchSize) { + throw new Error('Whole batch rejected') + } + + rejected += rejectedResults.length + } + + return rejected +} + +async function _licenceReferences () { + return db.query('SELECT "LIC_NO" FROM "import"."NALD_ABS_LICENCES";') +} + +module.exports = { + go +} diff --git a/src/modules/reference/controllers.js b/src/modules/reference/controllers.js new file mode 100644 index 00000000..2922955d --- /dev/null +++ b/src/modules/reference/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function reference (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + reference +} diff --git a/src/modules/licence-import/connectors/queries/purpose-condition-types.js b/src/modules/reference/lib/condition-type-queries.js similarity index 74% rename from src/modules/licence-import/connectors/queries/purpose-condition-types.js rename to src/modules/reference/lib/condition-type-queries.js index 8fc249e0..04cde1fb 100644 --- a/src/modules/licence-import/connectors/queries/purpose-condition-types.js +++ 
b/src/modules/reference/lib/condition-type-queries.js @@ -1,12 +1,14 @@ -const createPurposeConditionTypes = ` +'use strict' + +const purposeConditionTypes = ` INSERT INTO water.licence_version_purpose_condition_types ( code, subcode, description, subcode_description - ) - SELECT "CODE", "SUBCODE", "DESCR", "SUBCODE_DESC" FROM import."NALD_LIC_COND_TYPES" - WHERE "AFFECTS_ABS" = 'Y' + ) + SELECT "CODE", "SUBCODE", "DESCR", "SUBCODE_DESC" FROM import."NALD_LIC_COND_TYPES" + WHERE "AFFECTS_ABS" = 'Y' ON CONFLICT (code, subcode) DO UPDATE SET description = excluded.description, @@ -15,5 +17,5 @@ INSERT INTO water.licence_version_purpose_condition_types ( ` module.exports = { - createPurposeConditionTypes + purposeConditionTypes } diff --git a/src/modules/charging-import/lib/queries/financial-agreement-types.js b/src/modules/reference/lib/financial-agreement-type-queries.js similarity index 84% rename from src/modules/charging-import/lib/queries/financial-agreement-types.js rename to src/modules/reference/lib/financial-agreement-type-queries.js index 974e15e0..ebb5f4aa 100644 --- a/src/modules/charging-import/lib/queries/financial-agreement-types.js +++ b/src/modules/reference/lib/financial-agreement-type-queries.js @@ -1,4 +1,6 @@ -const importFinancialAgreementTypes = ` +'use strict' + +const financialAgreementTypes = ` INSERT INTO water.financial_agreement_types (financial_agreement_code, description, disabled, date_created, date_updated) SELECT a."CODE", a."DESCR", a."DISABLED"::boolean, now(), now() FROM import."NALD_FIN_AGRMNT_TYPES" a @@ -12,5 +14,5 @@ DO ` module.exports = { - importFinancialAgreementTypes + financialAgreementTypes } diff --git a/src/modules/charging-import/lib/queries/purposes.js b/src/modules/reference/lib/purpose-queries.js similarity index 62% rename from src/modules/charging-import/lib/queries/purposes.js rename to src/modules/reference/lib/purpose-queries.js index 71477f13..357040ae 100644 --- a/src/modules/charging-import/lib/queries/purposes.js +++ b/src/modules/reference/lib/purpose-queries.js @@ -1,18 +1,12 @@ 'use strict' -const importPrimaryPurposes = `insert into water.purposes_primary (legacy_id, description, date_created, date_updated) +const primaryPurposes = `insert into water.purposes_primary (legacy_id, description, date_created, date_updated) select p."CODE", p."DESCR", now(), now() from import."NALD_PURP_PRIMS" p on conflict (legacy_id) do update set description= excluded.description, date_updated = now();` -const importSecondaryPurposes = ` - insert into water.purposes_secondary (legacy_id, description, date_created, date_updated) - select p."CODE", p."DESCR", now(), now() - from import."NALD_PURP_SECS" p on conflict (legacy_id) do update set - description= excluded.description, date_updated = now();` - -const importUses = `insert into water.purposes_uses ( +const purposes = `insert into water.purposes_uses ( legacy_id, description, date_created, @@ -40,20 +34,26 @@ const importUses = `insert into water.purposes_uses ( loss_factor = excluded.loss_factor, is_two_part_tariff = excluded.is_two_part_tariff;` -const importValidPurposeCombinations = `INSERT INTO water.purposes as prps (purpose_primary_id, purpose_secondary_id, purpose_use_id, date_created) -SELECT pp.purpose_primary_id, -ps.purpose_secondary_id, -pu.purpose_use_id, -now() -FROM import."NALD_PURPOSES" as NALD_P -JOIN water.purposes_primary as pp ON NALD_P."APPR_CODE" = pp."legacy_id" -JOIN water.purposes_secondary as ps ON NALD_P."APSE_CODE" = ps."legacy_id" -JOIN water.purposes_uses as 
pu ON NALD_P."APUS_CODE" = pu."legacy_id" -WHERE NALD_P."DISABLED" = 'N' ON CONFLICT DO NOTHING;` +const secondaryPurposes = ` + insert into water.purposes_secondary (legacy_id, description, date_created, date_updated) + select p."CODE", p."DESCR", now(), now() + from import."NALD_PURP_SECS" p on conflict (legacy_id) do update set + description= excluded.description, date_updated = now();` + +const validPurposeCombinations = `INSERT INTO water.purposes as prps (purpose_primary_id, purpose_secondary_id, purpose_use_id, date_created) + SELECT pp.purpose_primary_id, + ps.purpose_secondary_id, + pu.purpose_use_id, + now() + FROM import."NALD_PURPOSES" as NALD_P + JOIN water.purposes_primary as pp ON NALD_P."APPR_CODE" = pp."legacy_id" + JOIN water.purposes_secondary as ps ON NALD_P."APSE_CODE" = ps."legacy_id" + JOIN water.purposes_uses as pu ON NALD_P."APUS_CODE" = pu."legacy_id" + WHERE NALD_P."DISABLED" = 'N' ON CONFLICT DO NOTHING;` module.exports = { - importPrimaryPurposes, - importSecondaryPurposes, - importUses, - importValidPurposeCombinations + primaryPurposes, + purposes, + secondaryPurposes, + validPurposeCombinations } diff --git a/src/modules/reference/process-steps.js b/src/modules/reference/process-steps.js new file mode 100644 index 00000000..2653666d --- /dev/null +++ b/src/modules/reference/process-steps.js @@ -0,0 +1,29 @@ +'use strict' + +const ImportStep = require('./steps/import.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + + try { + global.GlobalNotifier.omg('reference started') + + const startTime = currentTimeInNanoseconds() + + await ImportStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'reference complete') + } catch (error) { + global.GlobalNotifier.oops('reference failed') + } + + return processComplete +} + +module.exports = { + go +} diff --git a/src/modules/reference/routes.js b/src/modules/reference/routes.js new file mode 100644 index 00000000..46ab3ffd --- /dev/null +++ b/src/modules/reference/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers') + +const routes = [ + { + method: 'post', + handler: controllers.reference, + path: '/reference' + } +] + +module.exports = routes diff --git a/src/modules/reference/steps/import.js b/src/modules/reference/steps/import.js new file mode 100644 index 00000000..ba7c29f5 --- /dev/null +++ b/src/modules/reference/steps/import.js @@ -0,0 +1,33 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const ConditionTypeQueries = require('../lib/condition-type-queries.js') +const FinancialAgreementTypeQueries = require('../lib/financial-agreement-type-queries.js') +const PurposeQueries = require('../lib/purpose-queries.js') + +async function go () { + try { + global.GlobalNotifier.omg('reference.import started') + + const startTime = currentTimeInNanoseconds() + + await db.query(ConditionTypeQueries.purposeConditionTypes) + + await db.query(FinancialAgreementTypeQueries.financialAgreementTypes) + + await db.query(PurposeQueries.primaryPurposes) + await db.query(PurposeQueries.secondaryPurposes) + await db.query(PurposeQueries.purposes) + await db.query(PurposeQueries.validPurposeCombinations) + + calculateAndLogTimeTaken(startTime, 'reference.import complete') + } catch (error) { + global.GlobalNotifier.omfg('reference.import errored', 
error) + throw error + } +} + +module.exports = { + go +} diff --git a/src/modules/return-versions/controller.js b/src/modules/return-versions/controller.js deleted file mode 100644 index ddfba988..00000000 --- a/src/modules/return-versions/controller.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const CleanJob = require('./jobs/clean.js') - -async function importReturnVersions (request, h) { - await request.messageQueue.deleteQueue(CleanJob.JOB_NAME) - await request.messageQueue.publish(CleanJob.createMessage()) - - return h.response().code(204) -} - -module.exports = { - importReturnVersions -} diff --git a/src/modules/return-versions/controllers.js b/src/modules/return-versions/controllers.js new file mode 100644 index 00000000..87a4863e --- /dev/null +++ b/src/modules/return-versions/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function returnVersions (_request, h) { + ProcessSteps.go() + + return h.response().code(204) +} + +module.exports = { + returnVersions +} diff --git a/src/modules/return-versions/jobs/clean.js b/src/modules/return-versions/jobs/clean.js deleted file mode 100644 index 8668e7cc..00000000 --- a/src/modules/return-versions/jobs/clean.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict' - -const { pool } = require('../../../lib/connectors/db.js') -const Queries = require('../lib/clean-queries.js') -const ImportJob = require('./import.js') - -const JOB_NAME = 'return-versions.clean' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - // Delete any return requirement points linked to deleted NALD return requirements - await pool.query(Queries.cleanPoints) - - // Delete any return requirement purposes linked to deleted NALD return requirements - await pool.query(Queries.cleanPurposes) - - // Delete any return requirements linked to deleted NALD return requirements - await pool.query(Queries.cleanRequirements) - - // Delete any return versions that have no return requirements and that are linked to deleted return versions - await pool.query(Queries.cleanVersions) - - // Update the mod logs to remove the return version ID for where the return version has now been deleted - await pool.query(Queries.cleanModLogs) - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (messageQueue, job) { - if (!job.failed) { - await messageQueue.publish(ImportJob.createMessage()) - - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) - } else { - global.GlobalNotifier.omg(`${JOB_NAME}: failed`) - } -} - -module.exports = { - JOB_NAME, - createMessage, - handler, - onComplete -} diff --git a/src/modules/return-versions/jobs/import.js b/src/modules/return-versions/jobs/import.js deleted file mode 100644 index 5f0db107..00000000 --- a/src/modules/return-versions/jobs/import.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict' - -const { pool } = require('../../../lib/connectors/db.js') -const Queries = require('../lib/import-queries.js') - -const JOB_NAME = 'return-versions.import' - -function createMessage () { - return { - name: JOB_NAME, - options: { - singletonKey: JOB_NAME - } - } -} - -async function handler () { - try { - global.GlobalNotifier.omg(`${JOB_NAME}: started`) - - await pool.query(Queries.importReturnVersions) - await pool.query(Queries.importReturnRequirements) - await 
pool.query(Queries.importReturnRequirementPoints) - await pool.query(Queries.importReturnRequirementPurposes) - await pool.query(Queries.importReturnVersionsMultipleUpload) - await pool.query(Queries.importReturnVersionsCreateNotesFromDescriptions) - await pool.query(Queries.importReturnVersionsCorrectStatusForWrls) - await pool.query(Queries.importReturnVersionsSetToDraftMissingReturnRequirements) - await pool.query(Queries.importReturnVersionsAddMissingReturnVersionEndDates) - } catch (error) { - global.GlobalNotifier.omfg(`${JOB_NAME}: errored`, error) - throw error - } -} - -async function onComplete (job) { - if (!job.failed) { - global.GlobalNotifier.omg(`${JOB_NAME}: finished`) - } else { - global.GlobalNotifier.omg(`${JOB_NAME}: failed`) - } -} - -module.exports = { - JOB_NAME, - createMessage, - handler, - onComplete -} diff --git a/src/modules/return-versions/lib/clean-queries.js b/src/modules/return-versions/lib/clean-queries.js deleted file mode 100644 index dadf8bd2..00000000 --- a/src/modules/return-versions/lib/clean-queries.js +++ /dev/null @@ -1,128 +0,0 @@ -'use strict' - -const cleanPoints = ` - WITH nald_return_requirements AS ( - SELECT concat_ws(':', nrf."FGAC_REGION_CODE", nrf."ID") AS nald_id - FROM "import"."NALD_RET_FORMATS" nrf - ) - DELETE FROM water.return_requirement_points rrp WHERE rrp.return_requirement_id IN ( - SELECT - rr.return_requirement_id - FROM - water.return_requirements rr - WHERE - NOT EXISTS ( - SELECT 1 - FROM nald_return_requirements nrr - WHERE rr.external_id = nrr.nald_id - ) - AND NOT EXISTS ( - SELECT 1 - FROM "returns"."returns" rl - WHERE - rl.return_requirement = rr.legacy_id::varchar - LIMIT 1 - ) - ); -` - -const cleanPurposes = ` - WITH nald_return_requirements AS ( - SELECT concat_ws(':', nrf."FGAC_REGION_CODE", nrf."ID") AS nald_id - FROM "import"."NALD_RET_FORMATS" nrf - ) - DELETE FROM water.return_requirement_purposes rrp WHERE rrp.return_requirement_id IN ( - SELECT - rr.return_requirement_id - FROM - water.return_requirements rr - WHERE - NOT EXISTS ( - SELECT 1 - FROM nald_return_requirements nrr - WHERE rr.external_id = nrr.nald_id - ) - AND NOT EXISTS ( - SELECT 1 - FROM "returns"."returns" rl - WHERE - rl.return_requirement = rr.legacy_id::varchar - LIMIT 1 - ) - ); -` - -const cleanRequirements = ` - WITH nald_return_requirements AS ( - SELECT concat_ws(':', nrf."FGAC_REGION_CODE", nrf."ID") AS nald_id - FROM "import"."NALD_RET_FORMATS" nrf - ) - DELETE FROM water.return_requirements WHERE return_requirement_id IN ( - SELECT - rr.return_requirement_id - FROM - water.return_requirements rr - WHERE - NOT EXISTS ( - SELECT 1 - FROM nald_return_requirements nrr - WHERE rr.external_id = nrr.nald_id - ) - AND NOT EXISTS ( - SELECT 1 - FROM "returns"."returns" rl - WHERE - rl.return_requirement = rr.legacy_id::varchar - LIMIT 1 - ) - ); -` - -const cleanVersions = ` - WITH nald_return_versions AS ( - SELECT concat_ws(':', nv."FGAC_REGION_CODE", nv."AABL_ID", nv."VERS_NO") AS nald_id - FROM "import"."NALD_RET_VERSIONS" nv - ) - DELETE FROM water.return_versions WHERE return_version_id IN ( - SELECT - rv.return_version_id - FROM - water.return_versions rv - WHERE - NOT EXISTS ( - SELECT 1 - FROM nald_return_versions nrv - WHERE rv.external_id = nrv.nald_id - ) - AND NOT EXISTS ( - SELECT 1 - FROM water.return_requirements rr - WHERE - rr.return_version_id = rv.return_version_id - LIMIT 1 - ) - ); -` - -const cleanModLogs = ` - UPDATE - water.mod_logs ml - SET - return_version_id = NULL - WHERE - ml.return_version_id IS NOT NULL - 
AND ml.return_version_id NOT IN ( - SELECT - rv.return_version_id - FROM - water.return_versions rv - ); -` - -module.exports = { - cleanPoints, - cleanPurposes, - cleanRequirements, - cleanVersions, - cleanModLogs -} diff --git a/src/modules/return-versions/lib/import-queries.js b/src/modules/return-versions/lib/queries.js similarity index 93% rename from src/modules/return-versions/lib/import-queries.js rename to src/modules/return-versions/lib/queries.js index 4685fcfd..b88da8d0 100644 --- a/src/modules/return-versions/lib/import-queries.js +++ b/src/modules/return-versions/lib/queries.js @@ -147,7 +147,7 @@ const importReturnRequirementPoints = `insert into water.return_requirement_poin ngr_4=excluded.ngr_4; ` -const importReturnVersionsMultipleUpload = `update water.return_versions +const setMultipleUploadFlag = `update water.return_versions set multiple_upload = distinctReturnRequirements.is_upload from ( select distinct on (rr.return_version_id) rr.return_version_id, rr.is_upload @@ -160,7 +160,7 @@ where water.return_versions.return_version_id = distinctReturnRequirements.retur // applied to a mod logs query: a common table expression (CTE). // // The sub-query version locally took more than 5 minutes. This version with the CTE took 2 seconds! -const importReturnVersionsCreateNotesFromDescriptions = ` +const createNotesFromDescriptions = ` WITH aggregated_notes AS ( SELECT rr.return_version_id, @@ -183,7 +183,7 @@ const importReturnVersionsCreateNotesFromDescriptions = ` AND rv.notes IS NULL; ` -const importReturnVersionsCorrectStatusForWrls = `UPDATE water.return_versions +const correctStatusForWrls = `UPDATE water.return_versions SET status = 'current' WHERE status = 'superseded' AND return_version_id NOT IN (SELECT rv.return_version_id @@ -196,7 +196,7 @@ INNER JOIN water.return_versions rv2 WHERE rv.end_date IS NOT NULL); ` -const importReturnVersionsSetToDraftMissingReturnRequirements = `UPDATE water.return_versions +const setToDraftMissingReturnRequirements = `UPDATE water.return_versions SET status = 'draft' WHERE status = 'current' AND ( @@ -208,7 +208,7 @@ AND return_version_id NOT IN ( ); ` -const importReturnVersionsAddMissingReturnVersionEndDates = `UPDATE water.return_versions rv +const addMissingReturnVersionEndDates = `UPDATE water.return_versions rv SET end_date = bq.new_end_date FROM (SELECT rv.return_version_id, (SELECT rv3.start_date - 1 FROM water.return_versions rv3 WHERE rv3.licence_id = madness.licence_id AND rv3.version_number = madness.min_version) AS new_end_date @@ -232,13 +232,13 @@ WHERE rv.return_version_id = bq.return_version_id; ` module.exports = { - importReturnVersions, - importReturnRequirements, + addMissingReturnVersionEndDates, + correctStatusForWrls, + createNotesFromDescriptions, importReturnRequirementPoints, importReturnRequirementPurposes, - importReturnVersionsCreateNotesFromDescriptions, - importReturnVersionsMultipleUpload, - importReturnVersionsCorrectStatusForWrls, - importReturnVersionsSetToDraftMissingReturnRequirements, - importReturnVersionsAddMissingReturnVersionEndDates + importReturnRequirements, + importReturnVersions, + setMultipleUploadFlag, + setToDraftMissingReturnRequirements } diff --git a/src/modules/return-versions/plugin.js b/src/modules/return-versions/plugin.js deleted file mode 100644 index 48368d0d..00000000 --- a/src/modules/return-versions/plugin.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' - -const cron = require('node-cron') - -const CleanJob = require('./jobs/clean.js') -const ImportJob = 
require('./jobs/import.js') - -const config = require('../../../config') - -async function register (server, _options) { - // Register clean return versions job - await server.messageQueue.subscribe(CleanJob.JOB_NAME, CleanJob.handler) - await server.messageQueue.onComplete(CleanJob.JOB_NAME, (executedJob) => { - return CleanJob.onComplete(server.messageQueue, executedJob) - }) - - // Register import return versions job - await server.messageQueue.subscribe(ImportJob.JOB_NAME, ImportJob.handler) - await server.messageQueue.onComplete(ImportJob.JOB_NAME, (executedJob) => { - return ImportJob.onComplete(executedJob) - }) - - // Schedule clean job using cron. The clean job will then queue the import job in its onComplete - cron.schedule(config.import.returnVersions.schedule, async () => { - await server.messageQueue.publish(CleanJob.createMessage()) - }) -} - -module.exports = { - plugin: { - name: 'importReturnVersions', - dependencies: ['pgBoss'], - register - } -} diff --git a/src/modules/return-versions/process-steps.js b/src/modules/return-versions/process-steps.js new file mode 100644 index 00000000..973caa29 --- /dev/null +++ b/src/modules/return-versions/process-steps.js @@ -0,0 +1,29 @@ +'use strict' + +const ImportStep = require('./steps/import.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go () { + let processComplete = false + + try { + global.GlobalNotifier.omg('return-versions started') + + const startTime = currentTimeInNanoseconds() + + await ImportStep.go() + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'return-versions complete') + } catch (error) { + global.GlobalNotifier.oops('return-versions failed') + } + + return processComplete +} + +module.exports = { + go +} diff --git a/src/modules/return-versions/routes.js b/src/modules/return-versions/routes.js index 20ec8171..11d3ef38 100644 --- a/src/modules/return-versions/routes.js +++ b/src/modules/return-versions/routes.js @@ -1,12 +1,12 @@ 'use strict' -const controller = require('./controller') +const controllers = require('./controllers') const routes = [ { method: 'post', - handler: controller.importReturnVersions, - path: '/import/return-versions' + handler: controllers.returnVersions, + path: '/return-versions' } ] diff --git a/src/modules/return-versions/steps/import.js b/src/modules/return-versions/steps/import.js new file mode 100644 index 00000000..e41f20e7 --- /dev/null +++ b/src/modules/return-versions/steps/import.js @@ -0,0 +1,32 @@ +'use strict' + +const db = require('../../../lib/connectors/db.js') +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const Queries = require('../lib/queries.js') + +async function go () { + try { + global.GlobalNotifier.omg('return-versions.import started') + + const startTime = currentTimeInNanoseconds() + + await db.query(Queries.importReturnVersions) + await db.query(Queries.importReturnRequirements) + await db.query(Queries.importReturnRequirementPoints) + await db.query(Queries.importReturnRequirementPurposes) + await db.query(Queries.setMultipleUploadFlag) + await db.query(Queries.createNotesFromDescriptions) + await db.query(Queries.correctStatusForWrls) + await db.query(Queries.setToDraftMissingReturnRequirements) + await db.query(Queries.addMissingReturnVersionEndDates) + + calculateAndLogTimeTaken(startTime, 'return-versions.import complete') + } catch (error) { + global.GlobalNotifier.omfg('return-versions.import errored', 
error) + throw error + } +} + +module.exports = { + go +} diff --git a/src/modules/tracker/controllers.js b/src/modules/tracker/controllers.js new file mode 100644 index 00000000..901cba5c --- /dev/null +++ b/src/modules/tracker/controllers.js @@ -0,0 +1,13 @@ +'use strict' + +const ProcessSteps = require('./process-steps.js') + +async function tracker (_request, h) { + ProcessSteps.go('Someone triggered a test of the tracker email. You can ignore this message (it obviously worked!)') + + return h.response().code(204) +} + +module.exports = { + tracker +} diff --git a/src/modules/tracker/process-steps.js b/src/modules/tracker/process-steps.js new file mode 100644 index 00000000..ad53ae08 --- /dev/null +++ b/src/modules/tracker/process-steps.js @@ -0,0 +1,29 @@ +'use strict' + +const SendEmailStep = require('./steps/send-email.js') + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../lib/general.js') + +async function go (message) { + let processComplete = false + + try { + global.GlobalNotifier.omg('tracker started') + + const startTime = currentTimeInNanoseconds() + + await SendEmailStep.go(message) + + processComplete = true + + calculateAndLogTimeTaken(startTime, 'tracker complete') + } catch (error) { + global.GlobalNotifier.oops('tracker failed') + } + + return processComplete +} + +module.exports = { + go +} diff --git a/src/modules/tracker/routes.js b/src/modules/tracker/routes.js new file mode 100644 index 00000000..2297a56a --- /dev/null +++ b/src/modules/tracker/routes.js @@ -0,0 +1,13 @@ +'use strict' + +const controllers = require('./controllers') + +const routes = [ + { + method: 'post', + handler: controllers.tracker, + path: '/tracker' + } +] + +module.exports = routes diff --git a/src/modules/tracker/steps/send-email.js b/src/modules/tracker/steps/send-email.js new file mode 100644 index 00000000..fb8ac18d --- /dev/null +++ b/src/modules/tracker/steps/send-email.js @@ -0,0 +1,40 @@ +'use strict' + +const { calculateAndLogTimeTaken, currentTimeInNanoseconds } = require('../../../lib/general.js') +const NotifyConnector = require('../../../lib/connectors/water/notify.js') + +const config = require('../../../../config.js') + +async function go (message) { + try { + global.GlobalNotifier.omg('tracker.send-email started') + + const startTime = currentTimeInNanoseconds() + + const environment = `Sent from ${config.environment}\n\n` + + _send(`${environment}${message}`) + + calculateAndLogTimeTaken(startTime, 'tracker.send-email complete') + } catch (error) { + global.GlobalNotifier.omfg('tracker.send-email errored', error) + throw error + } +} + +function _send (content) { + NotifyConnector.postSendNotify( + 'email', + { + templateId: config.notify.templateId, + personalisation: { content }, + recipient: config.notify.mailbox + } + ).catch((_error) => { + global.GlobalNotifier.oops('tracker.send-email noting send errors even when email sent!') + }) +} + +module.exports = { + go +} diff --git a/src/plugins/pg-boss.plugin.js b/src/plugins/pg-boss.plugin.js deleted file mode 100644 index ac19aa20..00000000 --- a/src/plugins/pg-boss.plugin.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' - -const config = require('../../config') -const db = require('../../src/lib/connectors/db') - -const plugin = { - name: 'pgBoss', - register: async (server, options) => { - const PgBoss = require('pg-boss') - const boss = new PgBoss(options) - server.decorate('server', 'messageQueue', boss) - server.decorate('request', 'messageQueue', boss) - return boss.start() - } -} - 
-const options = { - ...config.pgBoss, - db: { - executeSql: (...args) => db.pool.query(...args) - } -} - -module.exports = { - plugin, - options -} diff --git a/src/routes.js b/src/routes.js index 1ec5276c..a1dbea00 100644 --- a/src/routes.js +++ b/src/routes.js @@ -1,21 +1,34 @@ -const chargingImportRoutes = require('./modules/charging-import/routes') const coreRoutes = require('./modules/core/routes') const healthRoutes = require('./modules/health/routes') -const jobSummaryRoutes = require('./modules/jobs/routes') -const licenceImportRoutes = require('./modules/licence-import/routes') -const naldImportRoutes = require('./modules/nald-import/routes') const returnsRoutes = require('./modules/returns/routes') -const returnVersionsRoutes = require('./modules/return-versions/routes.js') -const modLogsRoutes = require('./modules/mod-logs/routes.js') + +const NaldDataRoutes = require('./modules/nald-data/routes.js') +const CleanRoutes = require('./modules/clean/routes.js') +const PermitRoutes = require('./modules/permit/routes.js') +const CompanyDetailsRoutes = require('./modules/company-details/routes.js') +const ReturnVersionsRoutes = require('./modules/return-versions/routes.js') +const ModLogsRoutes = require('./modules/mod-logs/routes.js') +const ReferenceRoutes = require('./modules/reference/routes.js') +const ChargeVersionsRoutes = require('./modules/charge-versions/routes.js') +const BillRunsRoutes = require('./modules/bill-runs/routes.js') +const LicenceDetailsRoutes = require('./modules/licence-details/routes.js') +const TrackerRoutes = require('./modules/tracker/routes.js') +const NightlyImportRoutes = require('./modules/nightly-import/routes.js') module.exports = [ - ...chargingImportRoutes, ...coreRoutes, ...healthRoutes, - ...jobSummaryRoutes, - ...licenceImportRoutes, - ...naldImportRoutes, ...returnsRoutes, - ...returnVersionsRoutes, - ...modLogsRoutes + ...NaldDataRoutes, + ...CleanRoutes, + ...PermitRoutes, + ...CompanyDetailsRoutes, + ...ReturnVersionsRoutes, + ...ModLogsRoutes, + ...ReferenceRoutes, + ...ChargeVersionsRoutes, + ...BillRunsRoutes, + ...LicenceDetailsRoutes, + ...TrackerRoutes, + ...NightlyImportRoutes ]
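Note: with the pg-boss jobs and cron schedules removed, each module registered in src/routes.js above is now kicked off by POSTing to its route, and each controller returns 204 immediately while its ProcessSteps.go() runs in the background. As an illustrative sketch only (the IMPORT_SERVICE_URL variable, the localhost base URL and the idea of an external scheduler are assumptions, not part of this change), the endpoints defined in this diff could be exercised like this:

// Illustrative sketch only - not part of this change. Assumes Node 18+ (global fetch)
// and that the service is reachable at a hypothetical IMPORT_SERVICE_URL.
const BASE_URL = process.env.IMPORT_SERVICE_URL || 'http://localhost:8007'

async function triggerImports () {
  // Paths taken from the routes files added in this diff
  for (const path of ['/reference', '/permit', '/return-versions', '/tracker']) {
    const response = await fetch(`${BASE_URL}${path}`, { method: 'POST' })

    // Each controller responds 204 as soon as it has started its process-steps
    console.log(`${path} -> ${response.status}`)
  }
}

triggerImports().catch(console.error)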