From 81263616973b187f83b3f0d9cced77cdf7aaff2c Mon Sep 17 00:00:00 2001
From: manelcecs
Date: Thu, 26 Oct 2023 08:06:06 +0200
Subject: [PATCH 01/67] Add initial Jest config

---
 jest.config.ts |   20 +
 package.json   |    4 +
 tsconfig.json  |    8 +-
 yarn.lock      | 1760 +++++++++++++++++++++++++++++++++++++++++++++++++++-
 4 files changed, 1765 insertions(+), 27 deletions(-)
 create mode 100644 jest.config.ts

diff --git a/jest.config.ts b/jest.config.ts
new file mode 100644
index 00000000..45ef536b
--- /dev/null
+++ b/jest.config.ts
@@ -0,0 +1,20 @@
+import type { Config } from '@jest/types';
+
+const config: Config.InitialOptions = {
+  verbose: true,
+  preset: 'ts-jest',
+  testEnvironment: 'node',
+  testPathIgnorePatterns: ['/node_modules/'],
+  testMatch: ['<rootDir>/**/*.spec.ts'],
+  coveragePathIgnorePatterns: ['/node_modules/', '/test/'],
+  clearMocks: true,
+  transform: {
+    '^.+\\.ts?$': 'ts-jest',
+  },
+  transformIgnorePatterns: ['node_modules/(?!(@unocha)/)'],
+  modulePathIgnorePatterns: ['/test/'],
+  setupFilesAfterEnv: ['<rootDir>/tests/test-environment-setup.ts'],
+  testTimeout: 100_000,
+};
+
+export default config;
diff --git a/package.json b/package.json
index 6fda1312..9df752b5 100644
--- a/package.json
+++ b/package.json
@@ -34,12 +34,16 @@
     "@hapi/hapi": "^20.2.1",
     "@types/bunyan": "^1.8.8",
     "@types/hapi__hapi": "^20.0.9",
+    "@types/jest": "^29.5.5",
     "@types/node": "^20.8.10",
+    "@types/pg": "^8.10.2",
     "@unocha/hpc-repo-tools": "^4.0.0",
     "eslint": "^8.52.0",
     "husky": "^8.0.3",
+    "jest": "^29.7.0",
     "lint-staged": "^15.0.2",
     "prettier": "3.0.3",
+    "ts-jest": "^29.1.1",
     "ts-node-dev": "^2.0.0"
   },
   "engines": {
diff --git a/tsconfig.json b/tsconfig.json
index cf104830..d39d5a01 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -17,6 +17,12 @@
     "strictPropertyInitialization": false,
     "strict": true
   },
-  "include": ["src/**/*.ts", "src/**/*.js", "start.js", "bin"],
+  "include": [
+    "src/**/*.ts",
+    "src/**/*.js",
+    "start.js",
+    "bin",
+    "jest.config.ts"
+  ],
   "exclude": ["node_modules"]
 }
diff --git a/yarn.lock b/yarn.lock
index d8933ed7..77cc98bc 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -7,6 +7,14 @@
   resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf"
   integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==
 
+"@ampproject/remapping@^2.2.0":
+  version "2.2.1"
+  resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630"
+  integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==
+  dependencies:
+    "@jridgewell/gen-mapping" "^0.3.0"
+    "@jridgewell/trace-mapping" "^0.3.9"
+
 "@apollo/protobufjs@1.2.6":
   version "1.2.6"
   resolved "https://registry.yarnpkg.com/@apollo/protobufjs/-/protobufjs-1.2.6.tgz#d601e65211e06ae1432bf5993a1a0105f2862f27"
@@ -115,7 +123,7 @@
   dependencies:
     xss "^1.0.8"
 
-"@babel/code-frame@^7.0.0":
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.22.13":
   version "7.22.13"
   resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e"
   integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==
     "@babel/highlight" "^7.22.13"
     chalk "^2.4.2"
 
+"@babel/compat-data@^7.22.9":
+  version "7.23.3"
+  resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.3.tgz#3febd552541e62b5e883a25eb3effd7c7379db11"
+
integrity sha512-BmR4bWbDIoFJmJ9z2cZ8Gmm2MXgEDgjdWgpKmKWUt54UGFJdlj31ECtbaDvCG/qVdG3AQ1SfpZEs01lUFbzLOQ== + +"@babel/core@^7.11.6", "@babel/core@^7.12.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.3.tgz#5ec09c8803b91f51cc887dedc2654a35852849c9" + integrity sha512-Jg+msLuNuCJDyBvFv5+OKOUjWMZgd85bKjbICd3zWrKAo+bJ49HJufi7CQE0q0uR8NGyO6xkCACScNqyjHSZew== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.3" + "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helpers" "^7.23.2" + "@babel/parser" "^7.23.3" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.3" + "@babel/types" "^7.23.3" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + +"@babel/generator@^7.23.3", "@babel/generator@^7.7.2": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.3.tgz#86e6e83d95903fbe7613f448613b8b319f330a8e" + integrity sha512-keeZWAV4LU3tW0qRi19HRpabC/ilM0HRBBzf9/k8FFiG4KVpiv0FIy4hHfLfFQZNhziCTPTmd59zoyv6DNISzg== + dependencies: + "@babel/types" "^7.23.3" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + +"@babel/helper-compilation-targets@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz#0698fc44551a26cf29f18d4662d5bf545a6cfc52" + integrity sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw== + dependencies: + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.15" + browserslist "^4.21.9" + lru-cache "^5.1.1" + semver "^6.3.1" + +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== + dependencies: + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-module-imports@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== + dependencies: + "@babel/types" "^7.22.15" + +"@babel/helper-module-transforms@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" + integrity 
sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ== + dependencies: + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.20" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== + +"@babel/helper-simple-access@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-string-parser@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== + "@babel/helper-validator-identifier@^7.22.20": version "7.22.20" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== +"@babel/helper-validator-option@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.15.tgz#694c30dfa1d09a6534cdfcafbe56789d36aba040" + integrity sha512-bMn7RmyFjY/mdECUbgn9eoSY4vqvacUnS9i9vGAGttgFWesO6B4CYWA7XlpbWgBt71iv/hfbPlynohStqnu5hA== + +"@babel/helpers@^7.23.2": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.2.tgz#2832549a6e37d484286e15ba36a5330483cac767" + integrity sha512-lzchcp8SjTSVe/fPmLwtWVBFC7+Tbn8LGHDVfDp9JGxpAY5opSaEFgt8UQvrnECWOTdji2mOWMz1rOhkHscmGQ== + dependencies: + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.2" + "@babel/types" "^7.23.0" + "@babel/highlight@^7.22.13": version "7.22.20" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.20.tgz#4ca92b71d80554b01427815e06f2df965b9c1f54" @@ -137,6 +268,148 @@ chalk "^2.4.2" js-tokens "^4.0.0" +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.15", "@babel/parser@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.3.tgz#0ce0be31a4ca4f1884b5786057cadcb6c3be58f9" + integrity sha512-uVsWNvlVsIninV2prNz/3lHCb+5CJ+e+IUBfbjToAHODtfGYLfCFuY4AU7TskI+dAKk+njsPiBjq1gKTvZOBaw== + 
+"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.7.2": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.23.3.tgz#8f2e4f8a9b5f9aa16067e142c1ac9cd9f810f473" + integrity sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" 
"^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.7.2": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.23.3.tgz#24f460c85dbbc983cd2b9c4994178bcc01df958f" + integrity sha512-9EiNjVJOMwCO+43TqoTrgQ8jMwcAd0sWyXi9RPfIsLTj4R2MADDDQXELhffaUx/uJv2AYcxBgPwH6j4TIA4ytQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/template@^7.22.15", "@babel/template@^7.3.3": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + +"@babel/traverse@^7.23.2", "@babel/traverse@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.3.tgz#26ee5f252e725aa7aca3474aa5b324eaf7908b5b" + integrity sha512-+K0yF1/9yR0oHdE0StHuEj3uTPzwwbrLGfNOndVJVV2TqA5+j3oljJUb4nmB954FLGjNem976+B+eDuLIjesiQ== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.3" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.3" + "@babel/types" "^7.23.3" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.20.7", "@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.3", "@babel/types@^7.3.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.3.tgz#d5ea892c07f2ec371ac704420f4dcdb07b5f9598" + integrity sha512-OZnvoH2l8PK5eUvEcUyCt/sXgr/h+UWpVuBbOljwcrAgUl6lpchoQ++PHGyQy1AtYnVA6CEq3y5xeEI10brpXw== + dependencies: + "@babel/helper-string-parser" "^7.22.5" 
+ "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + "@cspotcode/source-map-consumer@0.8.0": version "0.8.0" resolved "https://registry.yarnpkg.com/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz#33bf4b7b39c178821606f669bbc447a6a629786b" @@ -533,21 +806,253 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz#e5211452df060fa8522b55c7b3c0c4d1981cb044" integrity sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw== +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-29.7.0.tgz#cd4822dbdb84529265c5a2bdb529a3c9cc950ffc" + integrity sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg== + dependencies: + "@jest/types" "^29.6.3" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^29.7.0" + jest-util "^29.7.0" + slash "^3.0.0" + +"@jest/core@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-29.7.0.tgz#b6cccc239f30ff36609658c5a5e2291757ce448f" + integrity sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg== + dependencies: + "@jest/console" "^29.7.0" + "@jest/reporters" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + ci-info "^3.2.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^29.7.0" + jest-config "^29.7.0" + jest-haste-map "^29.7.0" + jest-message-util "^29.7.0" + jest-regex-util "^29.6.3" + jest-resolve "^29.7.0" + jest-resolve-dependencies "^29.7.0" + jest-runner "^29.7.0" + jest-runtime "^29.7.0" + jest-snapshot "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" + jest-watcher "^29.7.0" + micromatch "^4.0.4" + pretty-format "^29.7.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-29.7.0.tgz#24d61f54ff1f786f3cd4073b4b94416383baf2a7" + integrity sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw== + dependencies: + "@jest/fake-timers" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/node" "*" + jest-mock "^29.7.0" + +"@jest/expect-utils@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.7.0.tgz#023efe5d26a8a70f21677d0a1afc0f0a44e3a1c6" + integrity 
sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA== + dependencies: + jest-get-type "^29.6.3" + +"@jest/expect@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-29.7.0.tgz#76a3edb0cb753b70dfbfe23283510d3d45432bf2" + integrity sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ== + dependencies: + expect "^29.7.0" + jest-snapshot "^29.7.0" + +"@jest/fake-timers@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-29.7.0.tgz#fd91bf1fffb16d7d0d24a426ab1a47a49881a565" + integrity sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ== + dependencies: + "@jest/types" "^29.6.3" + "@sinonjs/fake-timers" "^10.0.2" + "@types/node" "*" + jest-message-util "^29.7.0" + jest-mock "^29.7.0" + jest-util "^29.7.0" + +"@jest/globals@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-29.7.0.tgz#8d9290f9ec47ff772607fa864ca1d5a2efae1d4d" + integrity sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ== + dependencies: + "@jest/environment" "^29.7.0" + "@jest/expect" "^29.7.0" + "@jest/types" "^29.6.3" + jest-mock "^29.7.0" + +"@jest/reporters@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-29.7.0.tgz#04b262ecb3b8faa83b0b3d321623972393e8f4c7" + integrity sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" + "@jridgewell/trace-mapping" "^0.3.18" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^6.0.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-message-util "^29.7.0" + jest-util "^29.7.0" + jest-worker "^29.7.0" + slash "^3.0.0" + string-length "^4.0.1" + strip-ansi "^6.0.0" + v8-to-istanbul "^9.0.1" + +"@jest/schemas@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" + integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== + dependencies: + "@sinclair/typebox" "^0.27.8" + +"@jest/source-map@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-29.6.3.tgz#d90ba772095cf37a34a5eb9413f1b562a08554c4" + integrity sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw== + dependencies: + "@jridgewell/trace-mapping" "^0.3.18" + callsites "^3.0.0" + graceful-fs "^4.2.9" + +"@jest/test-result@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-29.7.0.tgz#8db9a80aa1a097bb2262572686734baed9b1657c" + integrity sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA== + dependencies: + "@jest/console" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^29.7.0": + version "29.7.0" + resolved 
"https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz#6cef977ce1d39834a3aea887a1726628a6f072ce" + integrity sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw== + dependencies: + "@jest/test-result" "^29.7.0" + graceful-fs "^4.2.9" + jest-haste-map "^29.7.0" + slash "^3.0.0" + +"@jest/transform@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-29.7.0.tgz#df2dd9c346c7d7768b8a06639994640c642e284c" + integrity sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw== + dependencies: + "@babel/core" "^7.11.6" + "@jest/types" "^29.6.3" + "@jridgewell/trace-mapping" "^0.3.18" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^2.0.0" + fast-json-stable-stringify "^2.1.0" + graceful-fs "^4.2.9" + jest-haste-map "^29.7.0" + jest-regex-util "^29.6.3" + jest-util "^29.7.0" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + write-file-atomic "^4.0.2" + +"@jest/types@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" + integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== + dependencies: + "@jest/schemas" "^29.6.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + "@josephg/resolvable@^1.0.0": version "1.0.1" resolved "https://registry.yarnpkg.com/@josephg/resolvable/-/resolvable-1.0.1.tgz#69bc4db754d79e1a2f17a650d3466e038d94a5eb" integrity sha512-CtzORUwWTTOTqfVtHaKRJ0I1kNQd1bpn3sUh8I3nJDVY+5/M/Oe1DnEWzPQvqq/xPIIkzzzIP7mfCoAjFRvDhg== +"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/resolve-uri@^3.0.3": version "3.1.0" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== + +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.15" + resolved 
"https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + "@jridgewell/trace-mapping@0.3.9": version "0.3.9" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" @@ -556,6 +1061,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.20" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f" + integrity sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@nodelib/fs.scandir@2.1.4": version "2.1.4" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz#d4b3549a5db5de2683e0c1071ab4f140904bbf69" @@ -751,6 +1264,25 @@ resolved "https://registry.yarnpkg.com/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df" integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== +"@sinclair/typebox@^0.27.8": + version "0.27.8" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== + +"@sinonjs/commons@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" + integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^10.0.2": + version "10.3.0" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" + integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== + dependencies: + "@sinonjs/commons" "^3.0.0" + "@tootallnate/once@1": version "1.1.2" resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" @@ -776,6 +1308,39 @@ resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.2.tgz#423c77877d0569db20e1fc80885ac4118314010e" integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA== +"@types/babel__core@^7.1.14": + version "7.20.4" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.4.tgz#26a87347e6c6f753b3668398e34496d6d9ac6ac0" + integrity sha512-mLnSC22IC4vcWiuObSRjrLd9XcBTGf59vUSoq2jkQDJ/QQ8PMI9rSuzE+aEV8karUMbskw07bKYoUJCKTUaygg== + dependencies: + "@babel/parser" "^7.20.7" + "@babel/types" "^7.20.7" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.7" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.7.tgz#a7aebf15c7bc0eb9abd638bdb5c0b8700399c9d0" + integrity sha512-6Sfsq+EaaLrw4RmdFWE9Onp63TOUue71AWb4Gpa6JxzgTYtimbM086WnYTy2U67AofR++QKCo08ZP6pwx8YFHQ== + dependencies: + "@babel/types" 
"^7.0.0" + +"@types/babel__template@*": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.4.tgz#5672513701c1b2199bc6dad636a9d7491586766f" + integrity sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": + version "7.20.4" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.4.tgz#ec2c06fed6549df8bc0eb4615b683749a4a92e1b" + integrity sha512-mSM/iKUk5fDDrEV/e83qY+Cr3I1+Q3qqTuEn++HAWYjEa1+NxZr6CNrcJGf2ZTnq4HoFGC3zaTPZTobCzCFukA== + dependencies: + "@babel/types" "^7.20.7" + "@types/bunyan@^1.8.8": version "1.8.8" resolved "https://registry.yarnpkg.com/@types/bunyan/-/bunyan-1.8.8.tgz#8d6d33f090f37c07e2a80af30ae728450a101008" @@ -791,6 +1356,13 @@ "@types/minimatch" "*" "@types/node" "*" +"@types/graceful-fs@^4.1.3": + version "4.1.9" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.9.tgz#2a06bc0f68a20ab37b3e36aa238be6abdf49e8b4" + integrity sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ== + dependencies: + "@types/node" "*" + "@types/hapi__catbox@*": version "10.2.3" resolved "https://registry.yarnpkg.com/@types/hapi__catbox/-/hapi__catbox-10.2.3.tgz#c9279c16d709bf2987491c332e11d18124ae018f" @@ -824,6 +1396,33 @@ dependencies: "@types/node" "*" +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz#7739c232a1fee9b4d3ce8985f314c0c6d33549d7" + integrity sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w== + +"@types/istanbul-lib-report@*": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz#53047614ae72e19fc0401d872de3ae2b4ce350bf" + integrity sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz#0f03e3d2f670fbdac586e34b433783070cc16f54" + integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@^29.5.5": + version "29.5.8" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.8.tgz#ed5c256fe2bc7c38b1915ee5ef1ff24a3427e120" + integrity sha512-fXEFTxMV2Co8ZF5aYFJv+YeA08RTYJfhtN5c9JSv/mFEMe+xxjufCb+PHL+bJcMs/ebPUsBu+UNTEz+ydXrR6g== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + "@types/json-schema@^7.0.12": version "7.0.14" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.14.tgz#74a97a5573980802f32c8e47b663530ab3b6b7d1" @@ -884,6 +1483,15 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.3.tgz#291c243e4b94dbfbc0c0ee26b7666f1d5c030e2c" integrity sha512-ehPtgRgaULsFG8x0NeYJvmyH1hmlfsNLujHe9dQEia/7MAJYdzMSi19JtchUHjmBA6XC/75dK55mzZH+RyieSg== +"@types/pg@^8.10.2": + version "8.10.9" + resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.10.9.tgz#d20bb948c6268c5bd847e2bf968f1194c5a2355a" + integrity 
sha512-UksbANNE/f8w0wOMxVKKIrLCbEMV+oM1uKejmwXr39olg4xqcfBDbXxObJAt6XxHbDa4XTKOlUEcEltXDX+XLQ== + dependencies: + "@types/node" "*" + pg-protocol "*" + pg-types "^4.0.1" + "@types/semver@^7.3.3": version "7.3.5" resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.5.tgz#74deebbbcb1e86634dbf10a5b5e8798626f5a597" @@ -894,6 +1502,11 @@ resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.4.tgz#0a41252ad431c473158b22f9bfb9a63df7541cff" integrity sha512-MMzuxN3GdFwskAnb6fz0orFvhfqi752yjaXylr0Rp4oDg5H0Zn1IuyRhDVvYOwAXoJirx2xuS16I3WjxnAIHiQ== +"@types/stack-utils@^2.0.0": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.3.tgz#6209321eb2c1712a7e7466422b8cb1fc0d9dd5d8" + integrity sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw== + "@types/strip-bom@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/strip-bom/-/strip-bom-3.0.0.tgz#14a8ec3956c2e81edb7520790aecf21c290aebd2" @@ -909,6 +1522,18 @@ resolved "https://registry.yarnpkg.com/@types/validator/-/validator-13.7.10.tgz#f9763dc0933f8324920afa9c0790308eedf55ca7" integrity sha512-t1yxFAR2n0+VO6hd/FJ9F2uezAZVWHLmpmlJzm1eX03+H7+HsuTAp7L8QJs+2pQCfWkP1+EXsGK9Z9v7o/qPVQ== +"@types/yargs-parser@*": + version "21.0.3" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" + integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ== + +"@types/yargs@^17.0.8": + version "17.0.31" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.31.tgz#8fd0089803fd55d8a285895a18b88cb71a99683c" + integrity sha512-bocYSx4DI8TmdlvxqGpVNXOgCNR1Jj0gNPhhAY+iz1rgKDAaYrAYdFYnhDV1IFuiuVc9HkOwyDcFxaTElF3/wg== + dependencies: + "@types/yargs-parser" "*" + "@typescript-eslint/eslint-plugin@6.9.1": version "6.9.1" resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.9.1.tgz#d8ce497dc0ed42066e195c8ecc40d45c7b1254f4" @@ -1078,6 +1703,13 @@ ansi-colors@^4.1.1: resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== +ansi-escapes@^4.2.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + ansi-escapes@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-5.0.0.tgz#b6a0caf0eef0c41af190e9a749e0c00ec04bb2a6" @@ -1102,13 +1734,18 @@ ansi-styles@^3.2.1: dependencies: color-convert "^1.9.0" -ansi-styles@^4.1.0: +ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + ansi-styles@^6.0.0: version "6.1.0" resolved 
"https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.1.0.tgz#87313c102b8118abd57371afab34618bf7350ed3" @@ -1119,18 +1756,18 @@ ansi-styles@^6.1.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== -anymatch@~3.1.1: - version "3.1.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" -anymatch@~3.1.2: - version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" - integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== +anymatch@~3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" @@ -1326,6 +1963,66 @@ axios@^0.21.0: dependencies: follow-redirects "^1.14.0" +babel-jest@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.7.0.tgz#f4369919225b684c56085998ac63dbd05be020d5" + integrity sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg== + dependencies: + "@jest/transform" "^29.7.0" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^29.6.3" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^29.6.3: + version "29.6.3" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz#aadbe943464182a8922c3c927c3067ff40d24626" + integrity sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.1.14" + "@types/babel__traverse" "^7.0.6" + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + 
"@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^29.6.3: + version "29.6.3" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz#fa05fa510e7d493896d7b0dd2033601c840f171c" + integrity sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA== + dependencies: + babel-plugin-jest-hoist "^29.6.3" + babel-preset-current-node-syntax "^1.0.0" + balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" @@ -1390,6 +2087,30 @@ braces@^3.0.2, braces@~3.0.2: dependencies: fill-range "^7.0.1" +browserslist@^4.21.9: + version "4.22.1" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.1.tgz#ba91958d1a59b87dab6fed8dfbcb3da5e2e9c619" + integrity sha512-FEVc202+2iuClEhZhrWy6ZiAcRLvNMyYcxZ8raemul1DYVOVdFsbqckWLdsixQZCpJlwe77Z3UTalE7jsjnKfQ== + dependencies: + caniuse-lite "^1.0.30001541" + electron-to-chromium "^1.4.535" + node-releases "^2.0.13" + update-browserslist-db "^1.0.13" + +bs-logger@0.x: + version "0.2.6" + resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== + dependencies: + fast-json-stable-stringify "2.x" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -1440,6 +2161,21 @@ callsites@^3.0.0: resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== +camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-lite@^1.0.30001541: + version "1.0.30001563" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001563.tgz#aa68a64188903e98f36eb9c56e48fba0c1fe2a32" + integrity sha512-na2WUmOxnwIZtwnFI2CZ/3er0wdNzU7hN+cPYz/z2ajHThnkWjNBOpEPP4n+4r2WPM847JaMotaJE3bnfzjyKw== + chalk@3.0.0, chalk@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" @@ -1470,6 +2206,11 @@ chalk@^4.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" 
+char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + charm@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/charm/-/charm-0.1.2.tgz#06c21eed1a1b06aeb67553cdc53e23274bac2296" @@ -1505,11 +2246,16 @@ chokidar@^3.5.3: optionalDependencies: fsevents "~2.3.2" -ci-info@^3.8.0: +ci-info@^3.2.0, ci-info@^3.8.0: version "3.9.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ== +cjs-module-lexer@^1.0.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" + integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== + class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" @@ -1558,6 +2304,25 @@ cli-truncate@^3.1.0: slice-ansi "^5.0.0" string-width "^5.0.0" +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +collect-v8-coverage@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" + integrity sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q== + collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" @@ -1645,6 +2410,11 @@ continuation-local-storage@^3.2.1: async-listener "^0.6.0" emitter-listener "^1.1.1" +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" @@ -1655,6 +2425,19 @@ core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +create-jest@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/create-jest/-/create-jest-29.7.0.tgz#a355c5b3cb1e1af02ba177fe7afd7feee49a5320" + integrity sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q== + dependencies: + "@jest/types" "^29.6.3" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-config "^29.7.0" + jest-util 
"^29.7.0" + prompts "^2.0.1" + create-require@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" @@ -1713,7 +2496,7 @@ debug@4.1.1: dependencies: ms "^2.1.1" -debug@4.3.4, debug@^4.3.4: +debug@4.3.4, debug@^4.1.0, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -1746,6 +2529,11 @@ decode-uri-component@^0.2.0: resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== +dedent@^1.0.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.5.1.tgz#4f3fc94c8b711e9bb2800d185cd6ad20f2a90aff" + integrity sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg== + deep-is@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" @@ -1756,6 +2544,11 @@ deep-is@~0.1.3: resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== +deepmerge@^4.2.2: + version "4.3.1" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== + define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" @@ -1803,6 +2596,16 @@ detect-file@^1.0.0: resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +diff-sequences@^29.6.3: + version "29.6.3" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" + integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== + diff@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" @@ -1841,6 +2644,11 @@ eastasianwidth@^0.2.0: resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== +electron-to-chromium@^1.4.535: + version "1.4.587" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.587.tgz#d8b864f21338b60798d447a3d83b90753f701d07" + integrity sha512-RyJX0q/zOkAoefZhB9XHghGeATVP0Q3mwA253XD/zj2OeXc+JZB9pCaEv6R578JUYaWM9PRhye0kXvd/V1cQ3Q== + emitter-listener@^1.1.1: version "1.1.2" resolved 
"https://registry.yarnpkg.com/emitter-listener/-/emitter-listener-1.1.2.tgz#56b140e8f6992375b3d7cb2cab1cc7432d9632e8" @@ -1848,6 +2656,16 @@ emitter-listener@^1.1.1: dependencies: shimmer "^1.2.0" +emittery@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" + integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + emoji-regex@^9.2.2: version "9.2.2" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" @@ -1867,11 +2685,21 @@ error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" @@ -2073,6 +2901,26 @@ execa@8.0.1: signal-exit "^4.1.0" strip-final-newline "^3.0.0" +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" @@ -2093,6 +2941,17 @@ expand-tilde@^2.0.0, expand-tilde@^2.0.2: dependencies: homedir-polyfill "^1.0.1" +expect@^29.0.0, expect@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" + integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw== + dependencies: + "@jest/expect-utils" "^29.7.0" + jest-get-type "^29.6.3" + jest-matcher-utils "^29.7.0" + jest-message-util "^29.7.0" + jest-util "^29.7.0" + extend-shallow@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" @@ -2148,7 +3007,7 @@ fast-json-patch@^3.0.0-1: resolved "https://registry.yarnpkg.com/fast-json-patch/-/fast-json-patch-3.1.1.tgz#85064ea1b1ebf97a3f7ad01e23f9337e72c66947" integrity sha512-vf6IHUX2SBcA+5/+4883dsIjpBTqmfBjmYiWK1savxQmFk4JfBMLa7ynTYOs1Rolp/T1betJxHiGD3g1Mn8lUQ== -fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== @@ -2165,6 +3024,13 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fb-watchman@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + fclone@1.0.11, fclone@~1.0.11: version "1.0.11" resolved "https://registry.yarnpkg.com/fclone/-/fclone-1.0.11.tgz#10e85da38bfea7fc599341c296ee1d77266ee640" @@ -2199,7 +3065,7 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -find-up@^4.1.0: +find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== @@ -2306,6 +3172,11 @@ fs.realpath@^1.0.0: resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= +fsevents@^2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + fsevents@~2.3.1, fsevents@~2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" @@ -2329,6 +3200,26 @@ function-bind@^1.1.2: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved 
"https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + get-stream@^8.0.1: version "8.0.1" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-8.0.1.tgz#def9dfd71742cd7754a7761ed43749a27d02eca2" @@ -2415,6 +3306,18 @@ glob@^7.1.3, glob@^7.1.6: once "^1.3.0" path-is-absolute "^1.0.0" +glob@^7.1.4: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" @@ -2435,6 +3338,11 @@ global-prefix@^1.0.1: is-windows "^1.0.1" which "^1.2.14" +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + globals@^13.19.0: version "13.19.0" resolved "https://registry.yarnpkg.com/globals/-/globals-13.19.0.tgz#7a42de8e6ad4f7242fbcca27ea5b23aca367b5c8" @@ -2459,6 +3367,11 @@ graceful-fs@^4.1.6, graceful-fs@^4.2.0: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a" integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== +graceful-fs@^4.2.9: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + graphemer@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" @@ -2557,6 +3470,11 @@ hosted-git-info@^2.1.4: resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + http-errors@1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.8.1.tgz#7c3f28577cbc8a207388455dbd62295ed07bd68c" @@ -2585,6 +3503,11 @@ https-proxy-agent@5, https-proxy-agent@^5.0.0: agent-base "6" debug "4" +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + human-signals@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-5.0.0.tgz#42665a284f9ae0dade3ba41ebc37eb4b852f3a28" @@ -2620,6 +3543,14 @@ import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" +import-local@^3.0.2: + version "3.1.0" + 
resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" @@ -2772,11 +3703,21 @@ is-extglob@^2.1.1: resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + is-fullwidth-code-point@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz#fae3167c729e7463f8461ce512b080a49268aa88" integrity sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ== +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" @@ -2822,6 +3763,11 @@ is-relative@^1.0.0: dependencies: is-unc-path "^1.0.0" +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + is-stream@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-3.0.0.tgz#e6bfd7aa6bef69f4f472ce9bb681e3e57b4319ac" @@ -2866,11 +3812,422 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz#2d166c4b0644d43a39f04bf6c2edd1e585f31756" + integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg== + +istanbul-lib-instrument@^5.0.4: + version "5.2.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-instrument@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.1.tgz#71e87707e8041428732518c6fb5211761753fbdf" + integrity sha512-EAMEJBsYuyyztxMxW3g7ugGPkrZsV57v0Hmv3mm1uQsmB+QnZuepg731CRaIgeUVSdmsTngOkSnauNF8p7FIhA== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + 
"@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^7.5.4" + +istanbul-lib-report@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" + integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^4.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.6" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.6.tgz#2544bcab4768154281a2f0870471902704ccaa1a" + integrity sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + iterall@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea" integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg== +jest-changed-files@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" + integrity sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w== + dependencies: + execa "^5.0.0" + jest-util "^29.7.0" + p-limit "^3.1.0" + +jest-circus@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-29.7.0.tgz#b6817a45fcc835d8b16d5962d0c026473ee3668a" + integrity sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw== + dependencies: + "@jest/environment" "^29.7.0" + "@jest/expect" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^1.0.0" + is-generator-fn "^2.0.0" + jest-each "^29.7.0" + jest-matcher-utils "^29.7.0" + jest-message-util "^29.7.0" + jest-runtime "^29.7.0" + jest-snapshot "^29.7.0" + jest-util "^29.7.0" + p-limit "^3.1.0" + pretty-format "^29.7.0" + pure-rand "^6.0.0" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-cli@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.7.0.tgz#5592c940798e0cae677eec169264f2d839a37995" + integrity sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg== + dependencies: + "@jest/core" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/types" "^29.6.3" + chalk "^4.0.0" + create-jest "^29.7.0" + exit "^0.1.2" + import-local "^3.0.2" + jest-config "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" + yargs "^17.3.1" + +jest-config@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.7.0.tgz#bcbda8806dbcc01b1e316a46bb74085a84b0245f" + integrity sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ== + dependencies: + "@babel/core" "^7.11.6" + "@jest/test-sequencer" "^29.7.0" + "@jest/types" "^29.6.3" + babel-jest "^29.7.0" + chalk "^4.0.0" + ci-info 
"^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-circus "^29.7.0" + jest-environment-node "^29.7.0" + jest-get-type "^29.6.3" + jest-regex-util "^29.6.3" + jest-resolve "^29.7.0" + jest-runner "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^29.7.0" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a" + integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw== + dependencies: + chalk "^4.0.0" + diff-sequences "^29.6.3" + jest-get-type "^29.6.3" + pretty-format "^29.7.0" + +jest-docblock@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.7.0.tgz#8fddb6adc3cdc955c93e2a87f61cfd350d5d119a" + integrity sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g== + dependencies: + detect-newline "^3.0.0" + +jest-each@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.7.0.tgz#162a9b3f2328bdd991beaabffbb74745e56577d1" + integrity sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ== + dependencies: + "@jest/types" "^29.6.3" + chalk "^4.0.0" + jest-get-type "^29.6.3" + jest-util "^29.7.0" + pretty-format "^29.7.0" + +jest-environment-node@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-29.7.0.tgz#0b93e111dda8ec120bc8300e6d1fb9576e164376" + integrity sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw== + dependencies: + "@jest/environment" "^29.7.0" + "@jest/fake-timers" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/node" "*" + jest-mock "^29.7.0" + jest-util "^29.7.0" + +jest-get-type@^29.6.3: + version "29.6.3" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1" + integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw== + +jest-haste-map@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.7.0.tgz#3c2396524482f5a0506376e6c858c3bbcc17b104" + integrity sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA== + dependencies: + "@jest/types" "^29.6.3" + "@types/graceful-fs" "^4.1.3" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^29.6.3" + jest-util "^29.7.0" + jest-worker "^29.7.0" + micromatch "^4.0.4" + walker "^1.0.8" + optionalDependencies: + fsevents "^2.3.2" + +jest-leak-detector@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz#5b7ec0dadfdfec0ca383dc9aa016d36b5ea4c728" + integrity sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw== + dependencies: + jest-get-type "^29.6.3" + pretty-format "^29.7.0" + +jest-matcher-utils@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz#ae8fec79ff249fd592ce80e3ee474e83a6c44f12" + integrity sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g== + dependencies: + chalk "^4.0.0" + jest-diff "^29.7.0" + 
jest-get-type "^29.6.3" + pretty-format "^29.7.0" + +jest-message-util@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.7.0.tgz#8bc392e204e95dfe7564abbe72a404e28e51f7f3" + integrity sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^29.6.3" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^29.7.0" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.7.0.tgz#4e836cf60e99c6fcfabe9f99d017f3fdd50a6347" + integrity sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw== + dependencies: + "@jest/types" "^29.6.3" + "@types/node" "*" + jest-util "^29.7.0" + +jest-pnp-resolver@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz#930b1546164d4ad5937d5540e711d4d38d4cad2e" + integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== + +jest-regex-util@^29.6.3: + version "29.6.3" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.6.3.tgz#4a556d9c776af68e1c5f48194f4d0327d24e8a52" + integrity sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== + +jest-resolve-dependencies@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz#1b04f2c095f37fc776ff40803dc92921b1e88428" + integrity sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA== + dependencies: + jest-regex-util "^29.6.3" + jest-snapshot "^29.7.0" + +jest-resolve@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-29.7.0.tgz#64d6a8992dd26f635ab0c01e5eef4399c6bcbc30" + integrity sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA== + dependencies: + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^29.7.0" + jest-pnp-resolver "^1.2.2" + jest-util "^29.7.0" + jest-validate "^29.7.0" + resolve "^1.20.0" + resolve.exports "^2.0.0" + slash "^3.0.0" + +jest-runner@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.7.0.tgz#809af072d408a53dcfd2e849a4c976d3132f718e" + integrity sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ== + dependencies: + "@jest/console" "^29.7.0" + "@jest/environment" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.13.1" + graceful-fs "^4.2.9" + jest-docblock "^29.7.0" + jest-environment-node "^29.7.0" + jest-haste-map "^29.7.0" + jest-leak-detector "^29.7.0" + jest-message-util "^29.7.0" + jest-resolve "^29.7.0" + jest-runtime "^29.7.0" + jest-util "^29.7.0" + jest-watcher "^29.7.0" + jest-worker "^29.7.0" + p-limit "^3.1.0" + source-map-support "0.5.13" + +jest-runtime@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.7.0.tgz#efecb3141cf7d3767a3a0cc8f7c9990587d3d817" + integrity sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ== + dependencies: + "@jest/environment" "^29.7.0" + "@jest/fake-timers" 
"^29.7.0" + "@jest/globals" "^29.7.0" + "@jest/source-map" "^29.6.3" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/node" "*" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^29.7.0" + jest-message-util "^29.7.0" + jest-mock "^29.7.0" + jest-regex-util "^29.6.3" + jest-resolve "^29.7.0" + jest-snapshot "^29.7.0" + jest-util "^29.7.0" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-snapshot@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.7.0.tgz#c2c574c3f51865da1bb329036778a69bf88a6be5" + integrity sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw== + dependencies: + "@babel/core" "^7.11.6" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-jsx" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/types" "^7.3.3" + "@jest/expect-utils" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^29.7.0" + graceful-fs "^4.2.9" + jest-diff "^29.7.0" + jest-get-type "^29.6.3" + jest-matcher-utils "^29.7.0" + jest-message-util "^29.7.0" + jest-util "^29.7.0" + natural-compare "^1.4.0" + pretty-format "^29.7.0" + semver "^7.5.3" + +jest-util@^29.0.0, jest-util@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc" + integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA== + dependencies: + "@jest/types" "^29.6.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.7.0.tgz#7bf705511c64da591d46b15fce41400d52147d9c" + integrity sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw== + dependencies: + "@jest/types" "^29.6.3" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^29.6.3" + leven "^3.1.0" + pretty-format "^29.7.0" + +jest-watcher@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.7.0.tgz#7810d30d619c3a62093223ce6bb359ca1b28a2f2" + integrity sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g== + dependencies: + "@jest/test-result" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.13.1" + jest-util "^29.7.0" + string-length "^4.0.1" + +jest-worker@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a" + integrity sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== + dependencies: + "@types/node" "*" + jest-util "^29.7.0" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest/-/jest-29.7.0.tgz#994676fc24177f088f1c5e3737f5697204ff2613" + integrity sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw== + dependencies: + "@jest/core" "^29.7.0" + "@jest/types" "^29.6.3" + import-local "^3.0.2" + jest-cli "^29.7.0" + joi@^17.3.0: version "17.4.0" resolved 
"https://registry.yarnpkg.com/joi/-/joi-17.4.0.tgz#b5c2277c8519e016316e49ababd41a1908d9ef20" @@ -2897,6 +4254,14 @@ js-tokens@^4.0.0: resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + js-yaml@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" @@ -2904,6 +4269,11 @@ js-yaml@^4.1.0: dependencies: argparse "^2.0.1" +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + jsesc@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e" @@ -2934,6 +4304,11 @@ json-stringify-safe@^5.0.1: resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= +json5@^2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" @@ -2965,6 +4340,11 @@ kind-of@^6.0.0, kind-of@^6.0.2: resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + knex@0.21.1: version "0.21.1" resolved "https://registry.yarnpkg.com/knex/-/knex-0.21.1.tgz#4fba7e6c58c9f459846c3090be157a732fc75e41" @@ -2991,6 +4371,11 @@ lazy@~1.0.11: resolved "https://registry.yarnpkg.com/lazy/-/lazy-1.0.11.tgz#daa068206282542c088288e975c297c1ae77b690" integrity sha1-2qBoIGKCVCwIgojpdcKXwa53tpA= +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + levn@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" @@ -3083,6 +4468,11 @@ lodash.get@^4.4.2: resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= +lodash.memoize@4.x: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity 
sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + lodash.merge@^4.6.2: version "4.6.2" resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" @@ -3143,7 +4533,14 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" -make-error@^1.1.1: +make-dir@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" + integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== + dependencies: + semver "^7.5.3" + +make-error@1.x, make-error@^1.1.1: version "1.3.6" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -3155,6 +4552,13 @@ make-iterator@^1.0.0: dependencies: kind-of "^6.0.2" +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + map-cache@^0.2.0, map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" @@ -3236,7 +4640,7 @@ min-indent@^1.0.0: resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== -"minimatch@2 || 3", minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2: +"minimatch@2 || 3", minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== @@ -3372,6 +4776,16 @@ node-fetch@^2.6.7: dependencies: whatwg-url "^5.0.0" +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.13: + version "2.0.13" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d" + integrity sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ== + normalize-package-data@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" @@ -3387,6 +4801,13 @@ normalize-path@^3.0.0, normalize-path@~3.0.0: resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + npm-run-path@^5.1.0: version 
"5.1.0" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-5.1.0.tgz#bc62f7f3f6952d9894bd08944ba011a6ee7b7e00" @@ -3443,6 +4864,11 @@ object.pick@^1.2.0, object.pick@^1.3.0: dependencies: isobject "^3.0.1" +obuf@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" @@ -3450,7 +4876,7 @@ once@^1.3.0: dependencies: wrappy "1" -onetime@^5.1.0: +onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== @@ -3495,7 +4921,7 @@ p-limit@^2.2.0: dependencies: p-try "^2.0.0" -p-limit@^3.0.2: +p-limit@^3.0.2, p-limit@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== @@ -3571,7 +4997,7 @@ parse-filepath@^1.0.1: map-cache "^0.2.0" path-root "^0.1.1" -parse-json@^5.0.0: +parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== @@ -3601,7 +5027,7 @@ path-is-absolute@^1.0.0: resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= -path-key@^3.1.0: +path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== @@ -3653,12 +5079,17 @@ pg-int8@1.0.1: resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw== +pg-numeric@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pg-numeric/-/pg-numeric-1.0.2.tgz#816d9a44026086ae8ae74839acd6a09b0636aa3a" + integrity sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw== + pg-pool@^3.6.1: version "3.6.1" resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.1.tgz#5a902eda79a8d7e3c928b77abf776b3cb7d351f7" integrity sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og== -pg-protocol@^1.6.0: +pg-protocol@*, pg-protocol@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833" integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q== @@ -3674,6 +5105,19 @@ pg-types@^2.1.0: postgres-date "~1.0.4" postgres-interval "^1.1.0" +pg-types@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-4.0.1.tgz#31857e89d00a6c66b06a14e907c3deec03889542" + integrity 
sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g== + dependencies: + pg-int8 "1.0.1" + pg-numeric "1.0.2" + postgres-array "~3.0.1" + postgres-bytea "~3.0.0" + postgres-date "~2.0.1" + postgres-interval "^3.0.0" + postgres-range "^1.1.1" + pg@^8.11.3: version "8.11.3" resolved "https://registry.yarnpkg.com/pg/-/pg-8.11.3.tgz#d7db6e3fe268fcedd65b8e4599cda0b8b4bf76cb" @@ -3696,12 +5140,17 @@ pgpass@1.x: dependencies: split2 "^3.1.1" +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + picomatch@^2.0.4, picomatch@^2.2.1: version "2.2.3" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d" integrity sha512-KpELjfwcCDUb9PeigTs2mBJzXUPzAuP2oPcA989He8Rte0+YUAjw1JVedDhuTKPkHjSYzMN3npC9luThGYEKdg== -picomatch@^2.3.1: +picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== @@ -3725,6 +5174,18 @@ pidusage@~3.0: dependencies: safe-buffer "^5.2.1" +pirates@^4.0.4: + version "4.0.6" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== + +pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + pluralize@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1" @@ -3820,16 +5281,33 @@ postgres-array@~2.0.0: resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e" integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA== +postgres-array@~3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-3.0.2.tgz#68d6182cb0f7f152a7e60dc6a6889ed74b0a5f98" + integrity sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog== + postgres-bytea@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35" integrity sha1-AntTPAqokOJtFy1Hz5zOzFIazTU= +postgres-bytea@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-3.0.0.tgz#9048dc461ac7ba70a6a42d109221619ecd1cb089" + integrity sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw== + dependencies: + obuf "~1.1.2" + postgres-date@~1.0.4: version "1.0.7" resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8" integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q== +postgres-date@~2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/postgres-date/-/postgres-date-2.0.1.tgz#638b62e5c33764c292d37b08f5257ecb09231457" + integrity sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw== + postgres-interval@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695" @@ -3837,6 +5315,16 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" +postgres-interval@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-3.0.0.tgz#baf7a8b3ebab19b7f38f07566c7aab0962f0c86a" + integrity sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw== + +postgres-range@^1.1.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/postgres-range/-/postgres-range-1.1.3.tgz#9ccd7b01ca2789eb3c2e0888b3184225fa859f76" + integrity sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g== + prelude-ls@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" @@ -3857,6 +5345,15 @@ prettier@3.0.3: resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.3.tgz#432a51f7ba422d1469096c0fdc28e235db8f9643" integrity sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg== +pretty-format@^29.0.0, pretty-format@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" + integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== + dependencies: + "@jest/schemas" "^29.6.3" + ansi-styles "^5.0.0" + react-is "^18.0.0" + promptly@^2: version "2.2.0" resolved "https://registry.yarnpkg.com/promptly/-/promptly-2.2.0.tgz#2a13fa063688a2a5983b161fff0108a07d26fc74" @@ -3864,6 +5361,14 @@ promptly@^2: dependencies: read "^1.0.4" +prompts@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + proxy-agent@~5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/proxy-agent/-/proxy-agent-5.0.0.tgz#d31405c10d6e8431fde96cba7a0c027ce01d633b" @@ -3888,6 +5393,11 @@ punycode@^2.1.0: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +pure-rand@^6.0.0: + version "6.0.4" + resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.0.4.tgz#50b737f6a925468679bff00ad20eade53f37d5c7" + integrity sha512-LA0Y9kxMYv47GIPJy6MI84fqTd2HmYZI83W/kM/SkKfDlajnZYfmXFTxkbY+xSBPkLJxltMa9hIkmdc29eguMA== + queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -3903,6 +5413,11 @@ raw-body@^2.2.0: iconv-lite "0.4.24" unpipe "1.0.0" +react-is@^18.0.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + read-pkg-up@^7.0.1: version "7.0.1" 
resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" @@ -4004,6 +5519,11 @@ repeat-string@^1.6.1: resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + require-in-the-middle@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz#b768f800377b47526d026bbf5a7f727f16eb412f" @@ -4013,6 +5533,13 @@ require-in-the-middle@^5.0.0: module-details-from-path "^1.0.3" resolve "^1.12.0" +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" @@ -4026,11 +5553,21 @@ resolve-from@^4.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= +resolve.exports@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" + integrity sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg== + resolve@^1.0.0, resolve@^1.1.6, resolve@^1.1.7, resolve@^1.12.0: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" @@ -4039,7 +5576,7 @@ resolve@^1.0.0, resolve@^1.1.6, resolve@^1.1.7, resolve@^1.12.0: is-core-module "^2.2.0" path-parse "^1.0.6" -resolve@^1.10.0: +resolve@^1.10.0, resolve@^1.20.0: version "1.22.8" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== @@ -4146,7 +5683,12 @@ semver@6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.2, semver@^7.3.2, semver@^7.5.4: +semver@^6.3.0, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity 
sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.2, semver@^7.3.2, semver@^7.5.3, semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== @@ -4203,11 +5745,21 @@ signal-exit@^3.0.2, signal-exit@^3.0.3: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== +signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + signal-exit@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + slash@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" @@ -4284,6 +5836,14 @@ source-map-resolve@^0.5.0: source-map-url "^0.4.0" urix "^0.1.0" +source-map-support@0.5.13: + version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + source-map-support@0.5.21: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" @@ -4310,7 +5870,7 @@ source-map@^0.5.6: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= -source-map@^0.6.0, source-map@~0.6.1: +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== @@ -4365,6 +5925,13 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= +stack-utils@^2.0.3: + version "2.0.6" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" + integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== + dependencies: + escape-string-regexp "^2.0.0" + static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" @@ -4383,6 +5950,23 @@ string-argv@0.3.2: resolved 
"https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.2.tgz#2b6d0ef24b656274d957d54e0a4bbf6153dc02b6" integrity sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q== +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string-width@^5.0.0: version "5.0.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.0.1.tgz#0d8158335a6cfd8eb95da9b6b262ce314a036ffd" @@ -4413,7 +5997,7 @@ string_decoder@~0.10.x: resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= -strip-ansi@^6.0.1: +strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -4432,6 +6016,16 @@ strip-bom@^3.0.0: resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + strip-final-newline@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz#52894c313fbff318835280aed60ff71ebf12b8fd" @@ -4468,6 +6062,13 @@ supports-color@^7.1.0: dependencies: has-flag "^4.0.0" +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" @@ -4483,6 +6084,15 @@ tarn@^3.0.0: resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.1.tgz#ebac2c6dbc6977d34d4526e0a7814200386a8aec" integrity sha512-6usSlV9KyHsspvwu2duKH+FMUhqJnAh6J5J/4MITl8s94iSUQTLkJggdiewKv4RyARQccnigV48Z+khiuVZDJw== +test-exclude@^6.0.0: + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" @@ -4493,6 +6103,16 @@ tildify@2.0.0: resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a" integrity sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw== +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" @@ -4545,6 +6165,20 @@ ts-api-utils@^1.0.1: resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.3.tgz#f12c1c781d04427313dbac808f453f050e54a331" integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== +ts-jest@^29.1.1: + version "29.1.1" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.1.1.tgz#f58fe62c63caf7bfcc5cc6472082f79180f0815b" + integrity sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA== + dependencies: + bs-logger "0.x" + fast-json-stable-stringify "2.x" + jest-util "^29.0.0" + json5 "^2.2.3" + lodash.memoize "4.x" + make-error "1.x" + semver "^7.5.3" + yargs-parser "^21.0.1" + ts-node-dev@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ts-node-dev/-/ts-node-dev-2.0.0.tgz#bdd53e17ab3b5d822ef519928dc6b4a7e0f13065" @@ -4649,11 +6283,21 @@ type-check@~0.3.2: dependencies: prelude-ls "~1.1.2" +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + type-fest@^0.20.2: version "0.20.2" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + type-fest@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" @@ -4731,6 +6375,14 @@ unset-value@^1.0.0: has-value "^0.3.1" isobject "^3.0.0" +update-browserslist-db@^1.0.13: + version "1.0.13" + resolved 
"https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" + integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + uri-js@^4.2.2: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" @@ -4773,6 +6425,15 @@ v8-compile-cache-lib@^3.0.1: resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== +v8-to-istanbul@^9.0.1: + version "9.1.3" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.1.3.tgz#ea456604101cd18005ac2cae3cdd1aa058a6306b" + integrity sha512-9lDD+EVI2fjFsMWXc6dy5JJzBsVTcQ2fVkfBvncZ6xJWG9wtBhOldG+mHkSL0+V1K/xgZz0JDO5UT5hFwHUghg== + dependencies: + "@jridgewell/trace-mapping" "^0.3.12" + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^2.0.0" + v8flags@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656" @@ -4816,6 +6477,13 @@ vm2@^3.9.3: acorn "^8.7.0" acorn-walk "^8.2.0" +walker@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" @@ -4853,6 +6521,15 @@ word-wrap@~1.2.3: resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.4.tgz#cb4b50ec9aca570abd1f52f33cd45b6c61739a9f" integrity sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA== +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^8.0.1, wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" @@ -4867,6 +6544,14 @@ wrappy@1: resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= +write-file-atomic@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== + dependencies: + imurmurhash "^0.1.4" + signal-exit "^3.0.7" + ws@^7.0.0: version "7.5.5" resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.5.tgz#8b4bc4af518cfabd0473ae4f99144287b33eb881" @@ -4895,6 +6580,11 @@ xtend@^4.0.0: resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== +y18n@^5.0.5: + version "5.0.8" + resolved 
"https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + yallist@^3.0.2: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" @@ -4918,6 +6608,24 @@ yamljs@0.3.0: argparse "^1.0.7" glob "^7.0.5" +yargs-parser@^21.0.1, yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs@^17.3.1: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yn@3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" From 3dc10a8a4564e3a413b418de735a523a53870c86 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 26 Oct 2023 08:08:35 +0200 Subject: [PATCH 02/67] Add containers configuration and bash --- bin/test.sh | 83 ++ .../services/solr/confs/conf/accents_ar.txt | 0 .../services/solr/confs/conf/accents_en.txt | 164 ++++ .../services/solr/confs/conf/accents_es.txt | 164 ++++ .../services/solr/confs/conf/accents_fr.txt | 164 ++++ .../services/solr/confs/conf/accents_und.txt | 148 ++++ config/services/solr/confs/conf/elevate.xml | 27 + config/services/solr/confs/conf/nouns_ar.txt | 0 config/services/solr/confs/conf/nouns_es.txt | 0 config/services/solr/confs/conf/nouns_fr.txt | 0 .../services/solr/confs/conf/protwords_ar.txt | 0 .../services/solr/confs/conf/protwords_en.txt | 7 + .../services/solr/confs/conf/protwords_es.txt | 0 .../services/solr/confs/conf/protwords_fr.txt | 0 .../solr/confs/conf/protwords_und.txt | 0 config/services/solr/confs/conf/schema.xml | 499 +++++++++++ .../solr/confs/conf/schema_extra_fields.xml | 425 +++++++++ .../solr/confs/conf/schema_extra_types.xml | 481 +++++++++++ .../services/solr/confs/conf/solrconfig.xml | 810 ++++++++++++++++++ .../solr/confs/conf/solrconfig_extra.xml | 281 ++++++ .../solr/confs/conf/solrconfig_index.xml | 0 .../solr/confs/conf/solrconfig_query.xml | 47 + .../conf/solrconfig_requestdispatcher.xml | 6 + .../solr/confs/conf/solrcore.properties | 9 + .../services/solr/confs/conf/stopwords_ar.txt | 119 +++ .../services/solr/confs/conf/stopwords_en.txt | 35 + .../services/solr/confs/conf/stopwords_es.txt | 308 +++++++ .../services/solr/confs/conf/stopwords_fr.txt | 163 ++++ .../solr/confs/conf/stopwords_und.txt | 0 .../services/solr/confs/conf/synonyms_ar.txt | 1 + .../services/solr/confs/conf/synonyms_en.txt | 1 + .../services/solr/confs/conf/synonyms_es.txt | 1 + .../services/solr/confs/conf/synonyms_fr.txt | 1 + .../services/solr/confs/conf/synonyms_und.txt | 1 + config/services/solr/run/entrypoint | 9 + config/services/solr/run/solr | 20 + src/data-providers/postgres/index.ts | 4 +- tests/docker-compose.yml | 39 + 38 files changed, 4015 insertions(+), 2 deletions(-) create mode 100755 bin/test.sh create mode 100644 config/services/solr/confs/conf/accents_ar.txt create mode 100644 
config/services/solr/confs/conf/accents_en.txt create mode 100644 config/services/solr/confs/conf/accents_es.txt create mode 100644 config/services/solr/confs/conf/accents_fr.txt create mode 100644 config/services/solr/confs/conf/accents_und.txt create mode 100644 config/services/solr/confs/conf/elevate.xml create mode 100644 config/services/solr/confs/conf/nouns_ar.txt create mode 100644 config/services/solr/confs/conf/nouns_es.txt create mode 100644 config/services/solr/confs/conf/nouns_fr.txt create mode 100644 config/services/solr/confs/conf/protwords_ar.txt create mode 100644 config/services/solr/confs/conf/protwords_en.txt create mode 100644 config/services/solr/confs/conf/protwords_es.txt create mode 100644 config/services/solr/confs/conf/protwords_fr.txt create mode 100644 config/services/solr/confs/conf/protwords_und.txt create mode 100644 config/services/solr/confs/conf/schema.xml create mode 100644 config/services/solr/confs/conf/schema_extra_fields.xml create mode 100644 config/services/solr/confs/conf/schema_extra_types.xml create mode 100644 config/services/solr/confs/conf/solrconfig.xml create mode 100644 config/services/solr/confs/conf/solrconfig_extra.xml create mode 100644 config/services/solr/confs/conf/solrconfig_index.xml create mode 100644 config/services/solr/confs/conf/solrconfig_query.xml create mode 100644 config/services/solr/confs/conf/solrconfig_requestdispatcher.xml create mode 100644 config/services/solr/confs/conf/solrcore.properties create mode 100644 config/services/solr/confs/conf/stopwords_ar.txt create mode 100644 config/services/solr/confs/conf/stopwords_en.txt create mode 100644 config/services/solr/confs/conf/stopwords_es.txt create mode 100644 config/services/solr/confs/conf/stopwords_fr.txt create mode 100644 config/services/solr/confs/conf/stopwords_und.txt create mode 100644 config/services/solr/confs/conf/synonyms_ar.txt create mode 100644 config/services/solr/confs/conf/synonyms_en.txt create mode 100644 config/services/solr/confs/conf/synonyms_es.txt create mode 100644 config/services/solr/confs/conf/synonyms_fr.txt create mode 100644 config/services/solr/confs/conf/synonyms_und.txt create mode 100755 config/services/solr/run/entrypoint create mode 100755 config/services/solr/run/solr create mode 100644 tests/docker-compose.yml diff --git a/bin/test.sh b/bin/test.sh new file mode 100755 index 00000000..b2dc31ac --- /dev/null +++ b/bin/test.sh @@ -0,0 +1,83 @@ +root=$(pwd) + +#Global variables +USAGE='this is the usage' +DEBUG_USAGE='this is the debug usage' +KEEP=0 +ONLY_CONTAINERS=0 +STOP_CONTAINERSq=0 +COMMAND_ARGS='--' + +function moveToTestDir { + echo 'Moving to tests dir' + cd ${root}/tests +} + +function moveToRootDir { + echo 'Moving to root dir' + cd ${root} +} + +## obtain options +while [ "$1" != "" ]; do + case $1 in + -d | --debug ) echo "Debug usage" + echo "$DEBUG_USAGE" + exit 0 + ;; + -oc | --only-containers ) ONLY_CONTAINERS=1 + ;; + -sc | --stop-containers ) STOP_CONTAINERS=1 + ;; + -k | --keep ) KEEP=1 + ;; + -h | --help ) echo "$USAGE" + exit + ;; + --) shift + while [ "$1" != "" ]; do + COMMAND_ARGS="${COMMAND_ARGS} $1" + shift + done + ;; + * ) echo "$USAGE" + exit 1 + esac + shift +done + +## STOP_CONTAINERS is a final option +if [ $STOP_CONTAINERS -eq 1 ]; then + echo 'Stopping docker containers' + moveToTestDir + docker-compose down + exit 0 +fi + +## ONLY_CONTAINERS must be 1 and STOP must be 0 +if [ $ONLY_CONTAINERS -eq 1 ] && [ $STOP -eq 1 ]; then + echo 'Invalid options - when using option -oc, option -ns must be 
used as well' + echo "$usage" + exit 1 +fi + +## should we check if docker is running? +echo 'Starting docker containers' +moveToTestDir +docker-compose up -d + +if [ $ONLY_CONTAINERS -eq 1 ]; then + exit 0 +fi + +## run tests +echo 'Running tests' +moveToRootDir +yarn jest + +if [ $KEEP -eq 0 ]; then + ## stop docker containers + echo 'Stopping docker containers' + moveToTestDir + docker-compose down +fi \ No newline at end of file diff --git a/config/services/solr/confs/conf/accents_ar.txt b/config/services/solr/confs/conf/accents_ar.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/accents_en.txt b/config/services/solr/confs/conf/accents_en.txt new file mode 100644 index 00000000..bed05147 --- /dev/null +++ b/config/services/solr/confs/conf/accents_en.txt @@ -0,0 +1,164 @@ +# À => A +"\u00C0" => "A" +# Á => A +"\u00C1" => "A" +#  => A +"\u00C2" => "A" +# à => A +"\u00C3" => "A" +# Ä => A +"\u00C4" => "A" +# Å => A +"\u00C5" => "A" +# Ą => A +"\u0104" => "A" +# Æ => AE +"\u00C6" => "AE" +# Ç => C +"\u00C7" => "C" +# Ć => C +"\U0106" => "C" +# È => E +"\u00C8" => "E" +# É => E +"\u00C9" => "E" +# Ê => E +"\u00CA" => "E" +# Ë => E +"\u00CB" => "E" +# Ę => E +"\u0118" => "E" +# Ì => I +"\u00CC" => "I" +# Í => I +"\u00CD" => "I" +# Î => I +"\u00CE" => "I" +# Ï => I +"\u00CF" => "I" +# IJ => IJ +"\u0132" => "IJ" +# Ð => D +"\u00D0" => "D" +# Ł => L +"\u0141" => "L" +# Ñ => N +"\u00D1" => "N" +# Ń => N +"\u0143" => "N" +# Ò => O +"\u00D2" => "O" +# Ó => O +"\u00D3" => "O" +# Ô => O +"\u00D4" => "O" +# Õ => O +"\u00D5" => "O" +# Ö => O +"\u00D6" => "O" +# Ø => O +"\u00D8" => "O" +# Œ => OE +"\u0152" => "OE" +# Þ +"\u00DE" => "TH" +# Ù => U +"\u00D9" => "U" +# Ú => U +"\u00DA" => "U" +# Û => U +"\u00DB" => "U" +# Ü => U +"\u00DC" => "U" +# Ý => Y +"\u00DD" => "Y" +# Ÿ => Y +"\u0178" => "Y" +# à => a +"\u00E0" => "a" +# á => a +"\u00E1" => "a" +# â => a +"\u00E2" => "a" +# ã => a +"\u00E3" => "a" +# ä => a +"\u00E4" => "a" +# å => a +"\u00E5" => "a" +# æ => ae +"\u00E6" => "ae" +# ç => c +"\u00E7" => "c" +# è => e +"\u00E8" => "e" +# é => e +"\u00E9" => "e" +# ê => e +"\u00EA" => "e" +# ë => e +"\u00EB" => "e" +# ì => i +"\u00EC" => "i" +# í => i +"\u00ED" => "i" +# î => i +"\u00EE" => "i" +# ï => i +"\u00EF" => "i" +# ij => ij +"\u0133" => "ij" +# ð => d +"\u00F0" => "d" +# ñ => n +"\u00F1" => "n" +# ò => o +"\u00F2" => "o" +# ó => o +"\u00F3" => "o" +# ô => o +"\u00F4" => "o" +# õ => o +"\u00F5" => "o" +# ö => o +"\u00F6" => "o" +# ø => o +"\u00F8" => "o" +# œ => oe +"\u0153" => "oe" +# ß => ss +"\u00DF" => "ss" +# Ś => S +"\u015a" => "S" +# þ => th +"\u00FE" => "th" +# ù => u +"\u00F9" => "u" +# ú => u +"\u00FA" => "u" +# û => u +"\u00FB" => "u" +# ü => u +"\u00FC" => "u" +# ý => y +"\u00FD" => "y" +# ÿ => y +"\u00FF" => "y" +# Ź => Z +"\u0179" => "Z" +# Ż => Z +"\u017b" => "Z" +# ff => ff +"\uFB00" => "ff" +# fi => fi +"\uFB01" => "fi" +# fl => fl +"\uFB02" => "fl" +# ffi => ffi +"\uFB03" => "ffi" +# ffl => ffl +"\uFB04" => "ffl" +# ſt => st +"\uFB05" => "st" +# st => st +"\uFB06" => "st" diff --git a/config/services/solr/confs/conf/accents_es.txt b/config/services/solr/confs/conf/accents_es.txt new file mode 100644 index 00000000..265e5e3d --- /dev/null +++ b/config/services/solr/confs/conf/accents_es.txt @@ -0,0 +1,164 @@ +# À => A +"\u00C0" => "A" +# Á => A +#"\u00C1" => "A" +#  => A +"\u00C2" => "A" +# à => A +"\u00C3" => "A" +# Ä => A +"\u00C4" => "A" +# Å => A +"\u00C5" => "A" +# Ą => A +"\u0104" => "A" +# Æ => AE +"\u00C6" => "AE" +# Ç => C 
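# [Editor's note, not part of this patch — refers to bin/test.sh a few hunks
# above] Typical invocations of the helper script would be, assuming it is run
# from the repository root:
#   bin/test.sh                    # start containers, run jest, stop containers
#   bin/test.sh -oc                # only start the docker containers
#   bin/test.sh -k -- --coverage   # keep containers up, collect args after "--"
#   bin/test.sh -sc                # stop the containers and exit
# Three apparent slips in the script as committed: "STOP_CONTAINERSq=0" is
# presumably meant to initialise STOP_CONTAINERS, the "-oc" validation tests an
# undefined $STOP (and echoes $usage instead of $USAGE), and COMMAND_ARGS is
# collected after "--" but never forwarded to the "yarn jest" call.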
+"\u00C7" => "C" +# Ć => C +"\U0106" => "C" +# È => E +"\u00C8" => "E" +# É => E +#"\u00C9" => "E" +# Ê => E +"\u00CA" => "E" +# Ë => E +"\u00CB" => "E" +# Ę => E +"\u0118" => "E" +# Ì => I +"\u00CC" => "I" +# Í => I +#"\u00CD" => "I" +# Î => I +"\u00CE" => "I" +# Ï => I +"\u00CF" => "I" +# IJ => IJ +"\u0132" => "IJ" +# Ð => D +"\u00D0" => "D" +# Ł => L +"\u0141" => "L" +# Ñ => N +#"\u00D1" => "N" +# Ń => N +"\u0143" => "N" +# Ò => O +"\u00D2" => "O" +# Ó => O +#"\u00D3" => "O" +# Ô => O +"\u00D4" => "O" +# Õ => O +"\u00D5" => "O" +# Ö => O +"\u00D6" => "O" +# Ø => O +"\u00D8" => "O" +# Œ => OE +"\u0152" => "OE" +# Þ +"\u00DE" => "TH" +# Ù => U +"\u00D9" => "U" +# Ú => U +#"\u00DA" => "U" +# Û => U +"\u00DB" => "U" +# Ü => U +"\u00DC" => "U" +# Ý => Y +"\u00DD" => "Y" +# Ÿ => Y +"\u0178" => "Y" +# à => a +"\u00E0" => "a" +# á => a +#"\u00E1" => "a" +# â => a +"\u00E2" => "a" +# ã => a +"\u00E3" => "a" +# ä => a +"\u00E4" => "a" +# å => a +"\u00E5" => "a" +# æ => ae +"\u00E6" => "ae" +# ç => c +"\u00E7" => "c" +# è => e +"\u00E8" => "e" +# é => e +#"\u00E9" => "e" +# ê => e +"\u00EA" => "e" +# ë => e +"\u00EB" => "e" +# ì => i +"\u00EC" => "i" +# í => i +#"\u00ED" => "i" +# î => i +"\u00EE" => "i" +# ï => i +"\u00EF" => "i" +# ij => ij +"\u0133" => "ij" +# ð => d +"\u00F0" => "d" +# ñ => n +#"\u00F1" => "n" +# ò => o +"\u00F2" => "o" +# ó => o +#"\u00F3" => "o" +# ô => o +"\u00F4" => "o" +# õ => o +"\u00F5" => "o" +# ö => o +"\u00F6" => "o" +# ø => o +"\u00F8" => "o" +# œ => oe +"\u0153" => "oe" +# ß => ss +"\u00DF" => "ss" +# Ś => S +"\u015a" => "S" +# þ => th +"\u00FE" => "th" +# ù => u +"\u00F9" => "u" +# ú => u +#"\u00FA" => "u" +# û => u +"\u00FB" => "u" +# ü => u +"\u00FC" => "u" +# ý => y +"\u00FD" => "y" +# ÿ => y +"\u00FF" => "y" +# Ź => Z +"\u0179" => "Z" +# Ż => Z +"\u017b" => "Z" +# ff => ff +"\uFB00" => "ff" +# fi => fi +"\uFB01" => "fi" +# fl => fl +"\uFB02" => "fl" +# ffi => ffi +"\uFB03" => "ffi" +# ffl => ffl +"\uFB04" => "ffl" +# ſt => st +"\uFB05" => "st" +# st => st +"\uFB06" => "st" diff --git a/config/services/solr/confs/conf/accents_fr.txt b/config/services/solr/confs/conf/accents_fr.txt new file mode 100644 index 00000000..7f66a29c --- /dev/null +++ b/config/services/solr/confs/conf/accents_fr.txt @@ -0,0 +1,164 @@ +# À => A +"\u00C0" => "A" +# Á => A +#"\u00C1" => "A" +#  => A +"\u00C2" => "A" +# à => A +#"\u00C3" => "A" +# Ä => A +#"\u00C4" => "A" +# Å => A +#"\u00C5" => "A" +# Ą => A +#"\u0104" => "A" +# Æ => AE +"\u00C6" => "AE" +# Ç => C +"\u00C7" => "C" +# Ć => C +#"\U0106" => "C" +# È => E +"\u00C8" => "E" +# É => E +"\u00C9" => "E" +# Ê => E +"\u00CA" => "E" +# Ë => E +#"\u00CB" => "E" +# Ę => E +#"\u0118" => "E" +# Ì => I +#"\u00CC" => "I" +# Í => I +#"\u00CD" => "I" +# Î => I +"\u00CE" => "I" +# Ï => I +#"\u00CF" => "I" +# IJ => IJ +#"\u0132" => "IJ" +# Ð => D +#"\u00D0" => "D" +# Ł => L +#"\u0141" => "L" +# Ñ => N +#"\u00D1" => "N" +# Ń => N +#"\u0143" => "N" +# Ò => O +#"\u00D2" => "O" +# Ó => O +#"\u00D3" => "O" +# Ô => O +"\u00D4" => "O" +# Õ => O +#"\u00D5" => "O" +# Ö => O +#"\u00D6" => "O" +# Ø => O +#"\u00D8" => "O" +# Œ => OE +"\u0152" => "OE" +# Þ +#"\u00DE" => "TH" +# Ù => U +"\u00D9" => "U" +# Ú => U +#"\u00DA" => "U" +# Û => U +"\u00DB" => "U" +# Ü => U +#"\u00DC" => "U" +# Ý => Y +#"\u00DD" => "Y" +# Ÿ => Y +#"\u0178" => "Y" +# à => a +"\u00E0" => "a" +# á => a +#"\u00E1" => "a" +# â => a +"\u00E2" => "a" +# ã => a +#"\u00E3" => "a" +# ä => a +#"\u00E4" => "a" +# å => a +#"\u00E5" => "a" +# æ => ae +"\u00E6" => "ae" +# ç => c +"\u00E7" => "c" 
+# è => e +"\u00E8" => "e" +# é => e +"\u00E9" => "e" +# ê => e +"\u00EA" => "e" +# ë => e +#"\u00EB" => "e" +# ì => i +#"\u00EC" => "i" +# í => i +#"\u00ED" => "i" +# î => i +"\u00EE" => "i" +# ï => i +#"\u00EF" => "i" +# ij => ij +#"\u0133" => "ij" +# ð => d +#"\u00F0" => "d" +# ñ => n +#"\u00F1" => "n" +# ò => o +#"\u00F2" => "o" +# ó => o +#"\u00F3" => "o" +# ô => o +"\u00F4" => "o" +# õ => o +#"\u00F5" => "o" +# ö => o +#"\u00F6" => "o" +# ø => o +#"\u00F8" => "o" +# œ => oe +"\u0153" => "oe" +# ß => ss +#"\u00DF" => "ss" +# Ś => S +#"\u015a" => "S" +# þ => th +#"\u00FE" => "th" +# ù => u +"\u00F9" => "u" +# ú => u +#"\u00FA" => "u" +# û => u +"\u00FB" => "u" +# ü => u +"\u00FC" => "u" +# ý => y +#"\u00FD" => "y" +# ÿ => y +#"\u00FF" => "y" +# Ź => Z +#"\u0179" => "Z" +# Ż => Z +#"\u017b" => "Z" +# ff => ff +#"\uFB00" => "ff" +# fi => fi +#"\uFB01" => "fi" +# fl => fl +#"\uFB02" => "fl" +# ffi => ffi +#"\uFB03" => "ffi" +# ffl => ffl +#"\uFB04" => "ffl" +# ſt => st +#"\uFB05" => "st" +# st => st +#"\uFB06" => "st" diff --git a/config/services/solr/confs/conf/accents_und.txt b/config/services/solr/confs/conf/accents_und.txt new file mode 100644 index 00000000..7c883f87 --- /dev/null +++ b/config/services/solr/confs/conf/accents_und.txt @@ -0,0 +1,148 @@ +# À => A +"\u00C0" => "A" +# Á => A +"\u00C1" => "A" +#  => A +"\u00C2" => "A" +# à => A +"\u00C3" => "A" +# Ä => A +"\u00C4" => "A" +# Å => A +"\u00C5" => "A" +# Æ => AE +"\u00C6" => "AE" +# Ç => C +"\u00C7" => "C" +# È => E +"\u00C8" => "E" +# É => E +"\u00C9" => "E" +# Ê => E +"\u00CA" => "E" +# Ë => E +"\u00CB" => "E" +# Ì => I +"\u00CC" => "I" +# Í => I +"\u00CD" => "I" +# Î => I +"\u00CE" => "I" +# Ï => I +"\u00CF" => "I" +# IJ => IJ +"\u0132" => "IJ" +# Ð => D +"\u00D0" => "D" +# Ñ => N +"\u00D1" => "N" +# Ò => O +"\u00D2" => "O" +# Ó => O +"\u00D3" => "O" +# Ô => O +"\u00D4" => "O" +# Õ => O +"\u00D5" => "O" +# Ö => O +"\u00D6" => "O" +# Ø => O +"\u00D8" => "O" +# Œ => OE +"\u0152" => "OE" +# Þ +"\u00DE" => "TH" +# Ù => U +"\u00D9" => "U" +# Ú => U +"\u00DA" => "U" +# Û => U +"\u00DB" => "U" +# Ü => U +"\u00DC" => "U" +# Ý => Y +"\u00DD" => "Y" +# Ÿ => Y +"\u0178" => "Y" +# à => a +"\u00E0" => "a" +# á => a +"\u00E1" => "a" +# â => a +"\u00E2" => "a" +# ã => a +"\u00E3" => "a" +# ä => a +"\u00E4" => "a" +# å => a +"\u00E5" => "a" +# æ => ae +"\u00E6" => "ae" +# ç => c +"\u00E7" => "c" +# è => e +"\u00E8" => "e" +# é => e +"\u00E9" => "e" +# ê => e +"\u00EA" => "e" +# ë => e +"\u00EB" => "e" +# ì => i +"\u00EC" => "i" +# í => i +"\u00ED" => "i" +# î => i +"\u00EE" => "i" +# ï => i +"\u00EF" => "i" +# ij => ij +"\u0133" => "ij" +# ð => d +"\u00F0" => "d" +# ñ => n +"\u00F1" => "n" +# ò => o +"\u00F2" => "o" +# ó => o +"\u00F3" => "o" +# ô => o +"\u00F4" => "o" +# õ => o +"\u00F5" => "o" +# ö => o +"\u00F6" => "o" +# ø => o +"\u00F8" => "o" +# œ => oe +"\u0153" => "oe" +# ß => ss +"\u00DF" => "ss" +# þ => th +"\u00FE" => "th" +# ù => u +"\u00F9" => "u" +# ú => u +"\u00FA" => "u" +# û => u +"\u00FB" => "u" +# ü => u +"\u00FC" => "u" +# ý => y +"\u00FD" => "y" +# ÿ => y +"\u00FF" => "y" +# ff => ff +"\uFB00" => "ff" +# fi => fi +"\uFB01" => "fi" +# fl => fl +"\uFB02" => "fl" +# ffi => ffi +"\uFB03" => "ffi" +# ffl => ffl +"\uFB04" => "ffl" +# ſt => st +"\uFB05" => "st" +# st => st +"\uFB06" => "st" diff --git a/config/services/solr/confs/conf/elevate.xml b/config/services/solr/confs/conf/elevate.xml new file mode 100644 index 00000000..193a0e72 --- /dev/null +++ b/config/services/solr/confs/conf/elevate.xml @@ -0,0 +1,27 @@ + + + + 
+ + + + + + diff --git a/config/services/solr/confs/conf/nouns_ar.txt b/config/services/solr/confs/conf/nouns_ar.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/nouns_es.txt b/config/services/solr/confs/conf/nouns_es.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/nouns_fr.txt b/config/services/solr/confs/conf/nouns_fr.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/protwords_ar.txt b/config/services/solr/confs/conf/protwords_ar.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/protwords_en.txt b/config/services/solr/confs/conf/protwords_en.txt new file mode 100644 index 00000000..cda85814 --- /dev/null +++ b/config/services/solr/confs/conf/protwords_en.txt @@ -0,0 +1,7 @@ +#----------------------------------------------------------------------- +# This file blocks words from being operated on by the stemmer and word delimiter. +& +< +> +' +" diff --git a/config/services/solr/confs/conf/protwords_es.txt b/config/services/solr/confs/conf/protwords_es.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/protwords_fr.txt b/config/services/solr/confs/conf/protwords_fr.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/protwords_und.txt b/config/services/solr/confs/conf/protwords_und.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/schema.xml b/config/services/solr/confs/conf/schema.xml new file mode 100644 index 00000000..179f5886 --- /dev/null +++ b/config/services/solr/confs/conf/schema.xml @@ -0,0 +1,499 @@ + + + + +]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + &extrafields; + + + &extratypes; + + + id + + + + + diff --git a/config/services/solr/confs/conf/schema_extra_fields.xml b/config/services/solr/confs/conf/schema_extra_fields.xml new file mode 100644 index 00000000..c8eb41b4 --- /dev/null +++ b/config/services/solr/confs/conf/schema_extra_fields.xml @@ -0,0 +1,425 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/services/solr/confs/conf/schema_extra_types.xml b/config/services/solr/confs/conf/schema_extra_types.xml new file mode 100644 index 00000000..69ee225e --- /dev/null +++ 
b/config/services/solr/confs/conf/schema_extra_types.xml @@ -0,0 +1,481 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/services/solr/confs/conf/solrconfig.xml b/config/services/solr/confs/conf/solrconfig.xml new file mode 100644 index 00000000..e225b691 --- /dev/null +++ b/config/services/solr/confs/conf/solrconfig.xml @@ -0,0 +1,810 @@ + + + + + + +]> + + + + + + + ${solr.abortOnConfigurationError:true} + + + ${solr.luceneMatchVersion:8.11} + + + + + + + + + + + + + + + + + + + + + ${solr.data.dir:} + + + + + + + + + ${solr.hdfs.home:} + + ${solr.hdfs.confdir:} + + ${solr.hdfs.blockcache.enabled:true} + + ${solr.hdfs.blockcache.global:true} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.lock.type:native} + + + + + + + + + + + + + true + + + &index; + + + + + + + + + ${solr.ulog.dir:} + + + + + ${solr.autoCommit.MaxDocs:-1} + ${solr.autoCommit.MaxTime:15000} + + + + + + ${solr.autoSoftCommit.MaxDocs:-1} + ${solr.autoSoftCommit.MaxTime:5000} + + + + + + + + + + + + + + + + &query; + + + + + + + + + + + + static firstSearcher warming in solrconfig.xml + + + + + + false + + + + + + + + + &requestdispatcher; + + + + + + + &extra; + + + + + + + + + 100 + + + + + + + + 70 + + 0.5 + + [-\w ,/\n\"']{20,200} + + + + + + + ]]> + ]]> + + + + + + + + + + + + + + + + + + + + + + + + ,, + ,, + ,, + ,, + ,]]> + ]]> + + + + + + 10 + .,!? 
+ + + + + + + WORD + + + en + US + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 5 + + + + + + + + + + + + + + + diff --git a/config/services/solr/confs/conf/solrconfig_extra.xml b/config/services/solr/confs/conf/solrconfig_extra.xml new file mode 100644 index 00000000..78e5d31f --- /dev/null +++ b/config/services/solr/confs/conf/solrconfig_extra.xml @@ -0,0 +1,281 @@ + + + ar + spellcheck_ar + solr.DirectSolrSpellChecker + internal + 0.5 + 2 + 1 + 5 + 4 + 0.01 + .01 + true + + + + en + spellcheck_en + solr.DirectSolrSpellChecker + internal + 0.5 + 2 + 1 + 5 + 4 + 0.01 + .01 + true + + + + fr + spellcheck_fr + solr.DirectSolrSpellChecker + internal + 0.5 + 2 + 1 + 5 + 4 + 0.01 + .01 + true + + + + und + spellcheck_und + solr.DirectSolrSpellChecker + internal + 0.5 + 2 + 1 + 5 + 4 + 0.01 + .01 + true + + + + zh_hans + spellcheck_zh_hans + solr.DirectSolrSpellChecker + internal + 0.5 + 1 + 1 + 5 + 1 + 0.01 + true + + + + es + spellcheck_es + solr.DirectSolrSpellChecker + internal + 0.5 + 2 + 1 + 5 + 4 + 0.01 + .01 + true + + + + + ar + AnalyzingInfixLookupFactory + DocumentDictionaryFactory + twm_suggest + text_ar + sm_context_tags + true + false + + + + en + AnalyzingInfixLookupFactory + DocumentDictionaryFactory + twm_suggest + text_en + sm_context_tags + true + false + + + + fr + AnalyzingInfixLookupFactory + DocumentDictionaryFactory + twm_suggest + text_fr + sm_context_tags + true + false + + + + und + AnalyzingInfixLookupFactory + DocumentDictionaryFactory + twm_suggest + text_und + sm_context_tags + true + false + + + + es + AnalyzingInfixLookupFactory + DocumentDictionaryFactory + twm_suggest + text_es + sm_context_tags + true + false + + + + + + false + false + false + true + false + 1 + false + 10 + + + terms + spellcheck + suggest + + + + + + + true + ignored_ + true + links + ignored_ + + + + + + + 1 + 1 + false + ${solr.mlt.timeAllowed:2000} + + + + + + + lucene + id + explicit + true + ${solr.selectSearchHandler.timeAllowed:-1} + false + + + spellcheck + elevator + + + + + + + id + und + on + false + false + 1 + 5 + 5 + true + true + 10 + 5 + + + spellcheck + + + + + + + true + und + 10 + + + suggest + + + + + + + id + true + + + tvComponent + + + + + + string + elevate.xml + + + + diff --git a/config/services/solr/confs/conf/solrconfig_index.xml b/config/services/solr/confs/conf/solrconfig_index.xml new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/solrconfig_query.xml b/config/services/solr/confs/conf/solrconfig_query.xml new file mode 100644 index 00000000..5bdd6969 --- /dev/null +++ b/config/services/solr/confs/conf/solrconfig_query.xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + true + + false + + 20 + 200 + 1024 diff --git a/config/services/solr/confs/conf/solrconfig_requestdispatcher.xml b/config/services/solr/confs/conf/solrconfig_requestdispatcher.xml new file mode 100644 index 00000000..3a3f17d1 --- /dev/null +++ b/config/services/solr/confs/conf/solrconfig_requestdispatcher.xml @@ -0,0 +1,6 @@ + + + diff --git a/config/services/solr/confs/conf/solrcore.properties b/config/services/solr/confs/conf/solrcore.properties new file mode 100644 index 00000000..af505a62 --- /dev/null +++ b/config/services/solr/confs/conf/solrcore.properties @@ -0,0 +1,9 @@ +#conf/solrcore.properties +solr.mlt.timeAllowed=2000 +solr.luceneMatchVersion=8.11.2 +solr.selectSearchHandler.timeAllowed=-1 +solr.autoCommit.MaxDocs=10000 +solr.autoCommit.MaxTime=15000 +solr.autoSoftCommit.MaxDocs=-1 +solr.autoSoftCommit.MaxTime=-1 
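# [Editor's note, not part of this patch] The values in solrcore.properties are
# substituted into solrconfig.xml through ${property:default} placeholders, so
# e.g. solr.autoCommit.MaxTime=15000 here feeds the
# ${solr.autoCommit.MaxTime:15000} reference in the autoCommit block above, and
# solr.mlt.timeAllowed feeds ${solr.mlt.timeAllowed:2000}. Tuning a value for
# the local test core therefore only needs an edit here (or a -Dname=value JVM
# flag), not a change to solrconfig.xml itself.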
+solr.install.dir=/opt/solr \ No newline at end of file diff --git a/config/services/solr/confs/conf/stopwords_ar.txt b/config/services/solr/confs/conf/stopwords_ar.txt new file mode 100644 index 00000000..e5ba3908 --- /dev/null +++ b/config/services/solr/confs/conf/stopwords_ar.txt @@ -0,0 +1,119 @@ +من +ومن +منها +منه +في +وفي +فيها +فيه +و +ف +ثم +او +أو +ب +بها +به +ا +أ +اى +اي +أي +أى +لا +ولا +الا +ألا +إلا +لكن +ما +وما +كما +فما +عن +مع +اذا +إذا +ان +أن +إن +انها +أنها +إنها +انه +أنه +إنه +بان +بأن +فان +فأن +وان +وأن +وإن +التى +التي +الذى +الذي +الذين +الى +الي +إلى +إلي +على +عليها +عليه +اما +أما +إما +ايضا +أيضا +كل +وكل +لم +ولم +لن +ولن +هى +هي +هو +وهى +وهي +وهو +فهى +فهي +فهو +انت +أنت +لك +لها +له +هذه +هذا +تلك +ذلك +هناك +كانت +كان +يكون +تكون +وكانت +وكان +غير +بعض +قد +نحو +بين +بينما +منذ +ضمن +حيث +الان +الآن +خلال +بعد +قبل +حتى +عند +عندما +لدى +جميع diff --git a/config/services/solr/confs/conf/stopwords_en.txt b/config/services/solr/confs/conf/stopwords_en.txt new file mode 100644 index 00000000..69810507 --- /dev/null +++ b/config/services/solr/confs/conf/stopwords_en.txt @@ -0,0 +1,35 @@ +a +an +and +are +as +at +be +but +by +for +if +in +into +is +it +no +not +of +on +or +s +such +t +that +the +their +then +there +these +they +this +to +was +will +with diff --git a/config/services/solr/confs/conf/stopwords_es.txt b/config/services/solr/confs/conf/stopwords_es.txt new file mode 100644 index 00000000..c59d9b20 --- /dev/null +++ b/config/services/solr/confs/conf/stopwords_es.txt @@ -0,0 +1,308 @@ +de +la +que +el +en +y +a +los +del +se +las +por +un +para +con +no +una +su +al +lo +como +más +pero +sus +le +ya +o +este +sí +porque +esta +entre +cuando +muy +sin +sobre +también +me +hasta +hay +donde +quien +desde +todo +nos +durante +todos +uno +les +ni +contra +otros +ese +eso +ante +ellos +e +esto +mí +antes +algunos +qué +unos +yo +otro +otras +otra +él +tanto +esa +estos +mucho +quienes +nada +muchos +cual +poco +ella +estar +estas +algunas +algo +nosotros +mi +mis +tú +te +ti +tu +tus +ellas +nosotras +vosotros +vosotras +os +mío +mía +míos +mías +tuyo +tuya +tuyos +tuyas +suyo +suya +suyos +suyas +nuestro +nuestra +nuestros +nuestras +vuestro +vuestra +vuestros +vuestras +esos +esas +estoy +estás +está +estamos +estáis +están +esté +estés +estemos +estéis +estén +estaré +estarás +estará +estaremos +estaréis +estarán +estaría +estarías +estaríamos +estaríais +estarían +estaba +estabas +estábamos +estabais +estaban +estuve +estuviste +estuvo +estuvimos +estuvisteis +estuvieron +estuviera +estuvieras +estuviéramos +estuvierais +estuvieran +estuviese +estuvieses +estuviésemos +estuvieseis +estuviesen +estando +estado +estada +estados +estadas +estad +he +has +ha +hemos +habéis +han +haya +hayas +hayamos +hayáis +hayan +habré +habrás +habrá +habremos +habréis +habrán +habría +habrías +habríamos +habríais +habrían +había +habías +habíamos +habíais +habían +hube +hubiste +hubo +hubimos +hubisteis +hubieron +hubiera +hubieras +hubiéramos +hubierais +hubieran +hubiese +hubieses +hubiésemos +hubieseis +hubiesen +habiendo +habido +habida +habidos +habidas +soy +eres +es +somos +sois +son +sea +seas +seamos +seáis +sean +seré +serás +será +seremos +seréis +serán +sería +serías +seríamos +seríais +serían +era +eras +éramos +erais +eran +fui +fuiste +fue +fuimos +fuisteis +fueron +fuera +fueras +fuéramos +fuerais +fueran +fuese +fueses +fuésemos +fueseis +fuesen +siendo +sido +tengo +tienes +tiene +tenemos +tenéis +tienen +tenga +tengas +tengamos +tengáis +tengan 
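# [Editor's note, not part of this patch] These stopwords_*.txt files are plain
# word lists (one term per line, '#' for comments) in the format read by
# solr.StopFilterFactory. A language-specific field type would typically pull
# in the matching list, for example (illustrative only):
#   <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords_es.txt"/>
# The per-language files in conf/ (accents_*, protwords_*, stopwords_*,
# synonyms_*) share a language suffix so each language's analyzer can reference
# a consistent set.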
+tendré +tendrás +tendrá +tendremos +tendréis +tendrán +tendría +tendrías +tendríamos +tendríais +tendrían +tenía +tenías +teníamos +teníais +tenían +tuve +tuviste +tuvo +tuvimos +tuvisteis +tuvieron +tuviera +tuvieras +tuviéramos +tuvierais +tuvieran +tuviese +tuvieses +tuviésemos +tuvieseis +tuviesen +teniendo +tenido +tenida +tenidos +tenidas +tened diff --git a/config/services/solr/confs/conf/stopwords_fr.txt b/config/services/solr/confs/conf/stopwords_fr.txt new file mode 100644 index 00000000..c78ec5aa --- /dev/null +++ b/config/services/solr/confs/conf/stopwords_fr.txt @@ -0,0 +1,163 @@ +au +aux +avec +ce +ces +dans +de +des +du +elle +en +et +eux +il +je +la +le +leur +lui +ma +mais +me +même +mes +moi +mon +ne +nos +notre +nous +on +ou +par +pas +pour +qu +que +qui +sa +se +ses +son +sur +ta +te +tes +toi +ton +tu +un +une +vos +votre +vous +c +d +j +l +à +m +n +s +t +y +été +étée +étées +étés +étant +suis +es +est +sommes +êtes +sont +serai +seras +sera +serons +serez +seront +serais +serait +serions +seriez +seraient +étais +était +étions +étiez +étaient +fus +fut +fûmes +fûtes +furent +sois +soit +soyons +soyez +soient +fusse +fusses +fût +fussions +fussiez +fussent +ayant +eu +eue +eues +eus +ai +as +avons +avez +ont +aurai +auras +aura +aurons +aurez +auront +aurais +aurait +aurions +auriez +auraient +avais +avait +avions +aviez +avaient +eut +eûmes +eûtes +eurent +aie +aies +ait +ayons +ayez +aient +eusse +eusses +eût +eussions +eussiez +eussent +ceci +celà +cet +cette +ici +ils +les +leurs +quel +quels +quelle +quelles +sans +soi diff --git a/config/services/solr/confs/conf/stopwords_und.txt b/config/services/solr/confs/conf/stopwords_und.txt new file mode 100644 index 00000000..e69de29b diff --git a/config/services/solr/confs/conf/synonyms_ar.txt b/config/services/solr/confs/conf/synonyms_ar.txt new file mode 100644 index 00000000..91689ff9 --- /dev/null +++ b/config/services/solr/confs/conf/synonyms_ar.txt @@ -0,0 +1 @@ +drupal, durpal diff --git a/config/services/solr/confs/conf/synonyms_en.txt b/config/services/solr/confs/conf/synonyms_en.txt new file mode 100644 index 00000000..91689ff9 --- /dev/null +++ b/config/services/solr/confs/conf/synonyms_en.txt @@ -0,0 +1 @@ +drupal, durpal diff --git a/config/services/solr/confs/conf/synonyms_es.txt b/config/services/solr/confs/conf/synonyms_es.txt new file mode 100644 index 00000000..91689ff9 --- /dev/null +++ b/config/services/solr/confs/conf/synonyms_es.txt @@ -0,0 +1 @@ +drupal, durpal diff --git a/config/services/solr/confs/conf/synonyms_fr.txt b/config/services/solr/confs/conf/synonyms_fr.txt new file mode 100644 index 00000000..91689ff9 --- /dev/null +++ b/config/services/solr/confs/conf/synonyms_fr.txt @@ -0,0 +1 @@ +drupal, durpal diff --git a/config/services/solr/confs/conf/synonyms_und.txt b/config/services/solr/confs/conf/synonyms_und.txt new file mode 100644 index 00000000..91689ff9 --- /dev/null +++ b/config/services/solr/confs/conf/synonyms_und.txt @@ -0,0 +1 @@ +drupal, durpal diff --git a/config/services/solr/run/entrypoint b/config/services/solr/run/entrypoint new file mode 100755 index 00000000..8a4ed49b --- /dev/null +++ b/config/services/solr/run/entrypoint @@ -0,0 +1,9 @@ +#!/bin/sh + +set -e + +echo "Changing permissions of solr directories" +chown -R solr:solr /var/solr/data/ +chown -R solr:solr /var/solr/logs/ + +exec su solr -c "export PATH=$PATH; /opt/hpc-solr/run/solr" diff --git a/config/services/solr/run/solr b/config/services/solr/run/solr new file mode 100755 index 00000000..d94b7ea3 --- 
/dev/null +++ b/config/services/solr/run/solr @@ -0,0 +1,20 @@ +#!/bin/bash + +set -e + +SOLR_HOME=/var/solr/data + +CONFDIR=/opt/hpc-solr/confs +SOLR_CORE=${SOLR_CORE:-fts} + +cd ${SOLR_HOME} + +if [ -d ${SOLR_HOME}/${SOLR_CORE} ]; then + echo "HPC-specific SOLR initialization seems completed. Skipping it." + solr-foreground +else + echo "Performing a one time HPC-specific SOLR initialization..." + echo "Creating a new solr core..." + VERBOSE=yes solr-precreate ${SOLR_CORE} ${CONFDIR} + echo "HPC init done!" +fi diff --git a/src/data-providers/postgres/index.ts b/src/data-providers/postgres/index.ts index 753ebafa..3a96280e 100644 --- a/src/data-providers/postgres/index.ts +++ b/src/data-providers/postgres/index.ts @@ -4,10 +4,10 @@ import { CONFIG } from '../../../config'; /** * Initialize a new Postgres provider */ -export async function createDbConnetion() { +export async function createDbConnetion(connection?: string) { const knex = Knex({ client: 'pg', - connection: CONFIG.db.connection, + connection: connection ?? CONFIG.db.connection, pool: { min: CONFIG.db.poolMin, max: CONFIG.db.poolMax, diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml new file mode 100644 index 00000000..81ee10cc --- /dev/null +++ b/tests/docker-compose.yml @@ -0,0 +1,39 @@ +version: '3.8' +services: + db: + image: postgres:14-alpine3.18 + container_name: hpc_postgres_test_api + ports: + - 6432:5432 + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=test + - POSTGRES_DB=test + volumes: + - ./migration/migration.sql:/docker-entrypoint-initdb.d/init.sql + solr: + image: solr:8.11.2-slim + environment: + DNSDOCK_NAME: solr + DNSDOCK_IMAGE: hpc + SOLR_CORE: fts ## take it as env var SOLR_CORE: ${SOLR_CORE} + container_name: hpcservice_solr_1_test_api + volumes: + - '../config/services/solr:/opt/hpc-solr:rw' + - '../data/test/hpc/solr_logs:/var/solr/logs:rw' + - '../data/test/hpc/solr_backups:/var/solr/data/backups:rw' + - '../data/test/hpc/solr:/var/solr/data:rw' + entrypoint: /opt/hpc-solr/run/entrypoint + user: root + ulimits: + nofile: + soft: 65000 + hard: 65000 + hid: + image: public.ecr.aws/unocha/nodejs:18-alpine + volumes: + - ../tools/hid_api_mock:/srv/www + environment: + PORT: 80 + hostname: hidapi + container_name: test_hidservice_test_api From 8df7cd03af9f271d33313558011941e5af5e07f6 Mon Sep 17 00:00:00 2001 From: Pl217 Date: Fri, 17 Nov 2023 15:15:48 +0100 Subject: [PATCH 03/67] =?UTF-8?q?=F0=9F=99=88=20Ignore=20data=20directory?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8ee5ef57..ab4a50b5 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ package-lock.json ._entypo-social .smbdelete* test-data/* +data/* .env .vscode *.sublime-project From 0176887b8612d1ab55f59217a4a409e028bcb373 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 26 Oct 2023 08:41:58 +0200 Subject: [PATCH 04/67] Add DB migration backup --- tests/migration/migration.sql | 8392 +++++++++++++++++++++++++++++++++ 1 file changed, 8392 insertions(+) create mode 100644 tests/migration/migration.sql diff --git a/tests/migration/migration.sql b/tests/migration/migration.sql new file mode 100644 index 00000000..6ebc8cf0 --- /dev/null +++ b/tests/migration/migration.sql @@ -0,0 +1,8392 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 11.7 (Debian 11.7-2.pgdg90+1) +-- Dumped by pg_dump version 11.7 (Debian 11.7-2.pgdg90+1) + +SET 
statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: topology; Type: SCHEMA; Schema: -; Owner: postgres +-- + +CREATE SCHEMA topology; + + +ALTER SCHEMA topology OWNER TO postgres; + +-- +-- Name: SCHEMA topology; Type: COMMENT; Schema: -; Owner: postgres +-- + +COMMENT ON SCHEMA topology IS 'PostGIS Topology schema'; + + +-- +-- Name: unaccent; Type: EXTENSION; Schema: -; Owner: +-- + +CREATE EXTENSION IF NOT EXISTS unaccent WITH SCHEMA public; + + +-- +-- Name: EXTENSION unaccent; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION unaccent IS 'text search dictionary that removes accents'; + + +-- +-- Name: enum_authGrantee_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_authGrantee_type" AS ENUM ( + 'user' +); + + +ALTER TYPE public."enum_authGrantee_type" OWNER TO postgres; + +-- +-- Name: enum_authTarget_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_authTarget_type" AS ENUM ( + 'global', + 'operation', + 'operationCluster', + 'plan', + 'governingEntity', + 'project' +); + + +ALTER TYPE public."enum_authTarget_type" OWNER TO postgres; + +-- +-- Name: enum_flowObject_behavior; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_flowObject_behavior" AS ENUM ( + 'overlap', + 'shared' +); + + +ALTER TYPE public."enum_flowObject_behavior" OWNER TO postgres; + +-- +-- Name: enum_form_belongsToType; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_form_belongsToType" AS ENUM ( + 'global', + 'operation' +); + + +ALTER TYPE public."enum_form_belongsToType" OWNER TO postgres; + +-- +-- Name: enum_iatiPublisher_fetchStatus; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_iatiPublisher_fetchStatus" AS ENUM ( + 'queued', + 'downloading', + 'downloaded', + 'halted' +); + + +ALTER TYPE public."enum_iatiPublisher_fetchStatus" OWNER TO postgres; + +-- +-- Name: enum_job_status; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.enum_job_status AS ENUM ( + 'pending', + 'success', + 'failed' +); + + +ALTER TYPE public.enum_job_status OWNER TO postgres; + +-- +-- Name: enum_job_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.enum_job_type AS ENUM ( + 'projectPdfGeneration', + 'locationImport', + 'confirmableCommand' +); + + +ALTER TYPE public.enum_job_type OWNER TO postgres; + +-- +-- Name: enum_objectExclude_module; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_objectExclude_module" AS ENUM ( + 'FTS', + 'RPM', + 'Public' +); + + +ALTER TYPE public."enum_objectExclude_module" OWNER TO postgres; + +-- +-- Name: enum_project_implementationStatus; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_project_implementationStatus" AS ENUM ( + 'Planning', + 'Implementing', + 'Ended - Completed', + 'Ended - Terminated', + 'Ended - Not started and abandoned' +); + + +ALTER TYPE public."enum_project_implementationStatus" OWNER TO postgres; + +-- +-- Name: enum_reportingWindowAssignment_assigneeType; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_reportingWindowAssignment_assigneeType" AS ENUM ( + 'operation', + 'operationCluster' +); 
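-- [Editor's note, not part of this patch] This dump is the file that
-- tests/docker-compose.yml (added in patch 02) mounts into the Postgres test
-- container as /docker-entrypoint-initdb.d/init.sql, so the container is
-- seeded with this schema on first start (user postgres, password test,
-- database test, host port 6432). A Jest setup file would then point the API
-- at that container through the optional parameter added to createDbConnetion()
-- in patch 02, for example (illustrative sketch only):
--   const db = await createDbConnetion(
--     'postgres://postgres:test@localhost:6432/test'
--   );
-- With no argument the function still falls back to CONFIG.db.connection.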
+ + +ALTER TYPE public."enum_reportingWindowAssignment_assigneeType" OWNER TO postgres; + +-- +-- Name: enum_reportingWindow_belongsToType; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public."enum_reportingWindow_belongsToType" AS ENUM ( + 'global', + 'operation' +); + + +ALTER TYPE public."enum_reportingWindow_belongsToType" OWNER TO postgres; + +SET default_tablespace = ''; + +SET default_with_oids = false; + +-- +-- Name: governingEntity; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."governingEntity" ( + id integer NOT NULL, + "planId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "entityPrototypeId" integer NOT NULL, + "deletedAt" timestamp with time zone, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[] +); + + +ALTER TABLE public."governingEntity" OWNER TO postgres; + +-- +-- Name: entityType(public."governingEntity"); Type: FUNCTION; Schema: public; Owner: postgres +-- + +CREATE FUNCTION public."entityType"(public."governingEntity") RETURNS character varying + LANGUAGE sql IMMUTABLE + AS $_$ SELECT "entityPrototype"."refCode" FROM "public"."entityPrototype" WHERE "entityPrototype"."id" = $1."entityPrototypeId" $_$; + + +ALTER FUNCTION public."entityType"(public."governingEntity") OWNER TO postgres; + +-- +-- Name: projectVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersion" ( + id integer NOT NULL, + "projectId" integer NOT NULL, + version integer NOT NULL, + name text NOT NULL, + "currentRequestedFunds" bigint, + "startDate" date, + "endDate" date, + objective text, + partners text, + "createdAt" timestamp with time zone, + "updatedAt" timestamp with time zone, + code character varying(255), + "editorParticipantId" integer, + tags character varying(25)[] DEFAULT (ARRAY[]::character varying[])::character varying(25)[] +); + + +ALTER TABLE public."projectVersion" OWNER TO postgres; + +-- +-- Name: implementationStatus(public."projectVersion"); Type: FUNCTION; Schema: public; Owner: postgres +-- + +CREATE FUNCTION public."implementationStatus"(public."projectVersion") RETURNS character varying + LANGUAGE sql STABLE + AS $_$ SELECT (CASE WHEN "project"."currentPublishedVersionId" = $1."id" THEN 'published' WHEN "project"."currentPublishedVersionId" IS NULL AND $1."id" = "project"."latestVersionId" THEN 'unpublished' WHEN "project"."currentPublishedVersionId" IS NOT NULL AND $1."id" = "project"."latestVersionId" THEN 'draft' ELSE 'archived' END) FROM "project" WHERE "project"."id" = $1."projectId" $_$; + + +ALTER FUNCTION public."implementationStatus"(public."projectVersion") OWNER TO postgres; + +-- +-- Name: SequelizeMeta; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."SequelizeMeta" ( + name character varying(255) NOT NULL +); + + +ALTER TABLE public."SequelizeMeta" OWNER TO postgres; + +-- +-- Name: attachment; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.attachment ( + id integer NOT NULL, + "objectId" integer NOT NULL, + "objectType" character varying(255) NOT NULL, + type character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "attachmentPrototypeId" integer, + "deletedAt" timestamp with time 
zone, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[], + "planId" integer +); + + +ALTER TABLE public.attachment OWNER TO postgres; + +-- +-- Name: attachmentPrototype; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."attachmentPrototype" ( + id integer NOT NULL, + "refCode" character varying(255), + type character varying(255), + value json, + "planId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."attachmentPrototype" OWNER TO postgres; + +-- +-- Name: attachmentPrototype_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."attachmentPrototype_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."attachmentPrototype_id_seq" OWNER TO postgres; + +-- +-- Name: attachmentPrototype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."attachmentPrototype_id_seq" OWNED BY public."attachmentPrototype".id; + + +-- +-- Name: attachmentVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."attachmentVersion" ( + id integer NOT NULL, + "attachmentId" integer, + "customReference" character varying(255), + value jsonb, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[], + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "hasDisaggregatedData" boolean DEFAULT false +); + + +ALTER TABLE public."attachmentVersion" OWNER TO postgres; + +-- +-- Name: attachmentVersion_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."attachmentVersion_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."attachmentVersion_id_seq" OWNER TO postgres; + +-- +-- Name: attachmentVersion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."attachmentVersion_id_seq" OWNED BY public."attachmentVersion".id; + + +-- +-- Name: attachment_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.attachment_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.attachment_id_seq OWNER TO postgres; + +-- +-- Name: attachment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.attachment_id_seq OWNED BY public.attachment.id; + + +-- +-- Name: authGrant; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."authGrant" ( + "createdAt" timestamp with time zone NOT NULL, + grantee integer NOT NULL, + roles character varying(255)[] NOT NULL, + target integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."authGrant" OWNER TO postgres; + +-- +-- Name: authGrantLog; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."authGrantLog" ( + actor integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + date timestamp with time zone NOT NULL, + grantee integer NOT NULL, + id integer NOT NULL, + 
"newRoles" character varying(255)[] NOT NULL, + target integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."authGrantLog" OWNER TO postgres; + +-- +-- Name: authGrantLog_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."authGrantLog_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."authGrantLog_id_seq" OWNER TO postgres; + +-- +-- Name: authGrantLog_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."authGrantLog_id_seq" OWNED BY public."authGrantLog".id; + + +-- +-- Name: authGrantee; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."authGrantee" ( + "createdAt" timestamp with time zone NOT NULL, + "granteeId" integer NOT NULL, + id integer NOT NULL, + type public."enum_authGrantee_type" NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."authGrantee" OWNER TO postgres; + +-- +-- Name: authGrantee_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."authGrantee_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."authGrantee_id_seq" OWNER TO postgres; + +-- +-- Name: authGrantee_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."authGrantee_id_seq" OWNED BY public."authGrantee".id; + + +-- +-- Name: authInvite; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."authInvite" ( + actor integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + email character varying(255) NOT NULL, + roles character varying(255)[] NOT NULL, + target integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."authInvite" OWNER TO postgres; + +-- +-- Name: authTarget; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."authTarget" ( + "createdAt" timestamp with time zone NOT NULL, + id integer NOT NULL, + "targetId" integer, + type public."enum_authTarget_type" NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."authTarget" OWNER TO postgres; + +-- +-- Name: authTarget_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."authTarget_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."authTarget_id_seq" OWNER TO postgres; + +-- +-- Name: authTarget_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."authTarget_id_seq" OWNED BY public."authTarget".id; + + +-- +-- Name: authToken; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."authToken" ( + "createdAt" timestamp with time zone NOT NULL, + expires timestamp with time zone, + participant integer NOT NULL, + "tokenHash" character varying(255) NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."authToken" OWNER TO postgres; + +-- +-- Name: blueprint; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.blueprint ( + id integer NOT NULL, + name character varying(255), + description text, + status character varying(255), + model json, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + type character varying(255) +); + + +ALTER TABLE public.blueprint OWNER TO postgres; + +-- +-- Name: budgetSegment; Type: TABLE; Schema: public; Owner: 
postgres +-- + +CREATE TABLE public."budgetSegment" ( + id integer NOT NULL, + "projectVersionId" integer NOT NULL, + name character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."budgetSegment" OWNER TO postgres; + +-- +-- Name: budgetSegmentBreakdown; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."budgetSegmentBreakdown" ( + id integer NOT NULL, + "budgetSegmentId" integer, + name character varying(255), + content jsonb, + type character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."budgetSegmentBreakdown" OWNER TO postgres; + +-- +-- Name: budgetSegmentBreakdownEntity; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."budgetSegmentBreakdownEntity" ( + id integer NOT NULL, + "budgetSegmentBreakdownId" integer, + "objectType" character varying(255) NOT NULL, + "objectId" integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."budgetSegmentBreakdownEntity" OWNER TO postgres; + +-- +-- Name: budgetSegmentBreakdownEntity_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."budgetSegmentBreakdownEntity_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."budgetSegmentBreakdownEntity_id_seq" OWNER TO postgres; + +-- +-- Name: budgetSegmentBreakdownEntity_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."budgetSegmentBreakdownEntity_id_seq" OWNED BY public."budgetSegmentBreakdownEntity".id; + + +-- +-- Name: budgetSegmentBreakdown_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."budgetSegmentBreakdown_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."budgetSegmentBreakdown_id_seq" OWNER TO postgres; + +-- +-- Name: budgetSegmentBreakdown_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."budgetSegmentBreakdown_id_seq" OWNED BY public."budgetSegmentBreakdown".id; + + +-- +-- Name: budgetSegment_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."budgetSegment_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."budgetSegment_id_seq" OWNER TO postgres; + +-- +-- Name: budgetSegment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."budgetSegment_id_seq" OWNED BY public."budgetSegment".id; + + +-- +-- Name: cache; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.cache ( + "createdAt" timestamp with time zone NOT NULL, + data jsonb NOT NULL, + fingerprint character varying(255) NOT NULL, + namespace character varying(255) NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + tag character varying(255) +); + + +ALTER TABLE public.cache OWNER TO postgres; + +-- +-- Name: category; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.category ( + id integer NOT NULL, + name character varying(255) NOT NULL, + description character varying(255), + "parentID" integer, + "group" character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + code character varying(255), + 
"includeTotals" boolean +); + + +ALTER TABLE public.category OWNER TO postgres; + +-- +-- Name: categoryGroup; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."categoryGroup" ( + type character varying(255) NOT NULL, + name character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."categoryGroup" OWNER TO postgres; + +-- +-- Name: categoryLegacy; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."categoryLegacy" ( + id integer NOT NULL, + "group" character varying(255) NOT NULL, + "legacyID" integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."categoryLegacy" OWNER TO postgres; + +-- +-- Name: categoryRef; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."categoryRef" ( + "objectID" integer NOT NULL, + "versionID" integer DEFAULT 1 NOT NULL, + "objectType" character varying(32) NOT NULL, + "categoryID" integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."categoryRef" OWNER TO postgres; + +-- +-- Name: category_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.category_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.category_id_seq OWNER TO postgres; + +-- +-- Name: category_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.category_id_seq OWNED BY public.category.id; + + +-- +-- Name: client; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.client ( + id integer NOT NULL, + "clientId" character varying(255), + "clientSecret" character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.client OWNER TO postgres; + +-- +-- Name: client_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.client_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.client_id_seq OWNER TO postgres; + +-- +-- Name: client_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.client_id_seq OWNED BY public.client.id; + + +-- +-- Name: conditionField; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."conditionField" ( + id integer NOT NULL, + name character varying(255) NOT NULL, + "fieldType" character varying(255) NOT NULL, + rules jsonb, + required boolean NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "order" integer, + description text, + "grouping" boolean DEFAULT true NOT NULL, + label jsonb DEFAULT '{}'::jsonb NOT NULL, + "planId" integer NOT NULL +); + + +ALTER TABLE public."conditionField" OWNER TO postgres; + +-- +-- Name: conditionFieldReliesOn; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."conditionFieldReliesOn" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "reliedOnById" integer NOT NULL, + "reliesOnId" integer NOT NULL +); + + +ALTER TABLE public."conditionFieldReliesOn" OWNER TO postgres; + +-- +-- Name: conditionFieldType; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."conditionFieldType" ( + type 
character varying(32) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."conditionFieldType" OWNER TO postgres; + +-- +-- Name: conditionField_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."conditionField_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."conditionField_id_seq" OWNER TO postgres; + +-- +-- Name: conditionField_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."conditionField_id_seq" OWNED BY public."conditionField".id; + + +-- +-- Name: currency; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.currency ( + id integer NOT NULL, + code character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.currency OWNER TO postgres; + +-- +-- Name: currency_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.currency_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.currency_id_seq OWNER TO postgres; + +-- +-- Name: currency_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.currency_id_seq OWNED BY public.currency.id; + + +-- +-- Name: disaggregationCategory; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."disaggregationCategory" ( + id integer NOT NULL, + name character varying(255), + label character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "disaggregationCategoryGroupId" integer, + "tagHxl" character varying(255) +); + + +ALTER TABLE public."disaggregationCategory" OWNER TO postgres; + +-- +-- Name: disaggregationCategoryGroup; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."disaggregationCategoryGroup" ( + id integer NOT NULL, + name character varying(255), + label character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "unitTypeId" integer, + "planId" integer +); + + +ALTER TABLE public."disaggregationCategoryGroup" OWNER TO postgres; + +-- +-- Name: disaggregationCategoryGroup_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."disaggregationCategoryGroup_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."disaggregationCategoryGroup_id_seq" OWNER TO postgres; + +-- +-- Name: disaggregationCategoryGroup_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."disaggregationCategoryGroup_id_seq" OWNED BY public."disaggregationCategoryGroup".id; + + +-- +-- Name: disaggregationCategory_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."disaggregationCategory_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."disaggregationCategory_id_seq" OWNER TO postgres; + +-- +-- Name: disaggregationCategory_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."disaggregationCategory_id_seq" OWNED BY public."disaggregationCategory".id; + + +-- +-- Name: disaggregationModel; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."disaggregationModel" ( + id integer NOT NULL, + 
name character varying(255), + creator json, + value json, + "planId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."disaggregationModel" OWNER TO postgres; + +-- +-- Name: disaggregationModel_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."disaggregationModel_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."disaggregationModel_id_seq" OWNER TO postgres; + +-- +-- Name: disaggregationModel_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."disaggregationModel_id_seq" OWNED BY public."disaggregationModel".id; + + +-- +-- Name: emergency; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.emergency ( + id integer NOT NULL, + name character varying(255), + description text, + date timestamp with time zone, + "glideId" character varying(255), + "levelThree" boolean, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + active boolean DEFAULT true, + restricted boolean +); + + +ALTER TABLE public.emergency OWNER TO postgres; + +-- +-- Name: emergencyLocation; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."emergencyLocation" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "emergencyId" integer NOT NULL, + "locationId" integer NOT NULL +); + + +ALTER TABLE public."emergencyLocation" OWNER TO postgres; + +-- +-- Name: emergency_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.emergency_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.emergency_id_seq OWNER TO postgres; + +-- +-- Name: emergency_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.emergency_id_seq OWNED BY public.emergency.id; + + +-- +-- Name: endpointLog; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."endpointLog" ( + id integer NOT NULL, + "participantId" integer, + "entityType" character varying(255), + "entityId" integer, + "editType" character varying(255), + value jsonb, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."endpointLog" OWNER TO postgres; + +-- +-- Name: endpointLog_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."endpointLog_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."endpointLog_id_seq" OWNER TO postgres; + +-- +-- Name: endpointLog_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."endpointLog_id_seq" OWNED BY public."endpointLog".id; + + +-- +-- Name: endpointTrace; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."endpointTrace" ( + id uuid NOT NULL, + route character varying(255), + method character varying(255), + container character varying(255), + "memBefore" jsonb, + "memAfter" jsonb, + status integer, + "time" integer, + "createdAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."endpointTrace" OWNER TO postgres; + +-- +-- Name: endpointUsage; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."endpointUsage" ( + path character varying(255) NOT NULL, + method character varying(255) NOT NULL, + nb integer 
DEFAULT 0, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."endpointUsage" OWNER TO postgres; + +-- +-- Name: entitiesAssociation; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."entitiesAssociation" ( + "parentId" integer NOT NULL, + "parentType" character varying(255) NOT NULL, + "childId" integer NOT NULL, + "childType" character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."entitiesAssociation" OWNER TO postgres; + +-- +-- Name: entityCategories; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."entityCategories" ( + id integer NOT NULL, + value json, + "planId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."entityCategories" OWNER TO postgres; + +-- +-- Name: entityCategories_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."entityCategories_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."entityCategories_id_seq" OWNER TO postgres; + +-- +-- Name: entityCategories_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."entityCategories_id_seq" OWNED BY public."entityCategories".id; + + +-- +-- Name: entityCategory; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."entityCategory" ( + id integer NOT NULL, + value json, + "planId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."entityCategory" OWNER TO postgres; + +-- +-- Name: entityCategory_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."entityCategory_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."entityCategory_id_seq" OWNER TO postgres; + +-- +-- Name: entityCategory_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."entityCategory_id_seq" OWNED BY public."entityCategory".id; + + +-- +-- Name: entityPrototype; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."entityPrototype" ( + id integer NOT NULL, + "refCode" character varying(255), + type character varying(255), + "planId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "orderNumber" integer, + value json +); + + +ALTER TABLE public."entityPrototype" OWNER TO postgres; + +-- +-- Name: entityPrototype_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."entityPrototype_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."entityPrototype_id_seq" OWNER TO postgres; + +-- +-- Name: entityPrototype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."entityPrototype_id_seq" OWNED BY public."entityPrototype".id; + + +-- +-- Name: externalData; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."externalData" ( + id integer NOT NULL, + "flowID" integer NOT NULL, + "versionID" integer, + "systemID" character varying(255) NOT NULL, + "externalRefID" character varying(255), + "externalRefDate" timestamp with time zone, + "objectType" character varying(255), + "refDirection" 
character varying(255), + data text, + matched boolean, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."externalData" OWNER TO postgres; + +-- +-- Name: externalData_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."externalData_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."externalData_id_seq" OWNER TO postgres; + +-- +-- Name: externalData_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."externalData_id_seq" OWNED BY public."externalData".id; + + +-- +-- Name: externalReference; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."externalReference" ( + id integer NOT NULL, + "systemID" character varying(255) NOT NULL, + "flowID" integer NOT NULL, + "versionID" integer, + "externalRecordID" character varying(255), + "externalRecordDate" timestamp with time zone, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "importInformation" json +); + + +ALTER TABLE public."externalReference" OWNER TO postgres; + +-- +-- Name: externalReference_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."externalReference_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."externalReference_id_seq" OWNER TO postgres; + +-- +-- Name: externalReference_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."externalReference_id_seq" OWNED BY public."externalReference".id; + + +-- +-- Name: fileAssetEntity; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."fileAssetEntity" ( + id integer NOT NULL, + filename character varying(255), + originalname character varying(255), + size integer, + mimetype character varying(255), + path character varying(255), + collection character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."fileAssetEntity" OWNER TO postgres; + +-- +-- Name: fileAssetEntity_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."fileAssetEntity_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."fileAssetEntity_id_seq" OWNER TO postgres; + +-- +-- Name: fileAssetEntity_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."fileAssetEntity_id_seq" OWNED BY public."fileAssetEntity".id; + + +-- +-- Name: fileRecord; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."fileRecord" ( + "createdAt" timestamp with time zone NOT NULL, + hash character varying(255) NOT NULL, + namespace character varying(255) NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."fileRecord" OWNER TO postgres; + +-- +-- Name: flow; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.flow ( + id integer NOT NULL, + "versionID" integer DEFAULT 1 NOT NULL, + "amountUSD" bigint NOT NULL, + "flowDate" timestamp with time zone, + "decisionDate" timestamp with time zone, + "firstReportedDate" timestamp with time zone, + "budgetYear" character varying(255), + "origAmount" bigint, + "origCurrency" character varying(255), + "exchangeRate" numeric(18,6), + "activeStatus" boolean DEFAULT true NOT NULL, + restricted boolean 
DEFAULT false NOT NULL, + description text, + notes text, + "versionStartDate" timestamp with time zone, + "versionEndDate" timestamp with time zone, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone, + "newMoney" boolean DEFAULT false NOT NULL +); + + +ALTER TABLE public.flow OWNER TO postgres; + +-- +-- Name: flowLink; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."flowLink" ( + "parentID" integer NOT NULL, + "childID" integer NOT NULL, + depth integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."flowLink" OWNER TO postgres; + +-- +-- Name: flowObject; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."flowObject" ( + "flowID" integer NOT NULL, + "objectID" integer NOT NULL, + "versionID" integer DEFAULT 1 NOT NULL, + "objectType" character varying(32) NOT NULL, + "refDirection" character varying(255) NOT NULL, + behavior public."enum_flowObject_behavior", + "objectDetail" character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."flowObject" OWNER TO postgres; + +-- +-- Name: flowObjectType; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."flowObjectType" ( + type character varying(32) NOT NULL, + name character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."flowObjectType" OWNER TO postgres; + +-- +-- Name: flow_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.flow_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.flow_id_seq OWNER TO postgres; + +-- +-- Name: flow_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.flow_id_seq OWNED BY public.flow.id; + + +-- +-- Name: form; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.form ( + "belongsToId" integer, + "belongsToType" public."enum_form_belongsToType" NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + id integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.form OWNER TO postgres; + +-- +-- Name: formVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."formVersion" ( + "createdAt" timestamp with time zone NOT NULL, + data jsonb NOT NULL, + "isLatest" boolean NOT NULL, + "modifiedBy" integer, + root integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + version integer NOT NULL +); + + +ALTER TABLE public."formVersion" OWNER TO postgres; + +-- +-- Name: form_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.form_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.form_id_seq OWNER TO postgres; + +-- +-- Name: form_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.form_id_seq OWNED BY public.form.id; + + +-- +-- Name: projectVersionComment; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionComment" ( + id integer NOT NULL, + content text, + "participantId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "projectVersionPlanId" integer, + step 
character varying(255) +); + + +ALTER TABLE public."projectVersionComment" OWNER TO postgres; + +-- +-- Name: fulfillmentComment_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."fulfillmentComment_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."fulfillmentComment_id_seq" OWNER TO postgres; + +-- +-- Name: fulfillmentComment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."fulfillmentComment_id_seq" OWNED BY public."projectVersionComment".id; + + +-- +-- Name: globalCluster; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."globalCluster" ( + id integer NOT NULL, + "hrinfoId" integer, + type character varying(255), + name character varying(255), + code character varying(255), + homepage character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "parentId" integer, + "defaultIconId" character varying(255), + "displayFTSSummariesFromYear" integer +); + + +ALTER TABLE public."globalCluster" OWNER TO postgres; + +-- +-- Name: globalClusterAssociation; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."globalClusterAssociation" ( + id integer NOT NULL, + "globalClusterId" integer, + "governingEntityId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."globalClusterAssociation" OWNER TO postgres; + +-- +-- Name: globalClusterAssociation_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."globalClusterAssociation_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."globalClusterAssociation_id_seq" OWNER TO postgres; + +-- +-- Name: globalClusterAssociation_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."globalClusterAssociation_id_seq" OWNED BY public."globalClusterAssociation".id; + + +-- +-- Name: globalCluster_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."globalCluster_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."globalCluster_id_seq" OWNER TO postgres; + +-- +-- Name: globalCluster_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."globalCluster_id_seq" OWNED BY public."globalCluster".id; + + +-- +-- Name: globalIndicator; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."globalIndicator" ( + id integer NOT NULL, + "hrinfoId" integer, + label text, + "subDomain" character varying(255), + code character varying(255), + unit character varying(255), + "searchData" text, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."globalIndicator" OWNER TO postgres; + +-- +-- Name: globalIndicator_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."globalIndicator_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."globalIndicator_id_seq" OWNER TO postgres; + +-- +-- Name: globalIndicator_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."globalIndicator_id_seq" OWNED BY public."globalIndicator".id; + + +-- +-- Name: governingEntityVersion; Type: TABLE; Schema: public; Owner: 
postgres +-- + +CREATE TABLE public."governingEntityVersion" ( + id integer NOT NULL, + "governingEntityId" integer, + name character varying(255), + "customReference" character varying(255), + value json, + overriding boolean DEFAULT false NOT NULL, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[], + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + tags character varying(25)[] DEFAULT (ARRAY[]::character varying[])::character varying(25)[] +); + + +ALTER TABLE public."governingEntityVersion" OWNER TO postgres; + +-- +-- Name: governingEntityVersion_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."governingEntityVersion_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."governingEntityVersion_id_seq" OWNER TO postgres; + +-- +-- Name: governingEntityVersion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."governingEntityVersion_id_seq" OWNED BY public."governingEntityVersion".id; + + +-- +-- Name: governingEntity_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."governingEntity_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."governingEntity_id_seq" OWNER TO postgres; + +-- +-- Name: governingEntity_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."governingEntity_id_seq" OWNED BY public."governingEntity".id; + + +-- +-- Name: highWater; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."highWater" ( + "jobName" character varying(255) NOT NULL, + "timestamp" timestamp with time zone NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."highWater" OWNER TO postgres; + +-- +-- Name: iatiActivity; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiActivity" ( + id integer NOT NULL, + "iatiIdentifier" character varying(255), + title text, + version character varying(255), + "startDate" date, + "endDate" date, + description text, + "reportingOrgRef" character varying(255), + "reportingOrgName" character varying(255), + currency character varying(255), + humanitarian boolean DEFAULT false, + "iatiHumanitarian" boolean DEFAULT false, + hash character varying(255), + "lastUpdatedAt" timestamp with time zone, + "updatedStatus" boolean DEFAULT false, + viewed boolean DEFAULT false, + "iatiPublisherId" character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone +); + + +ALTER TABLE public."iatiActivity" OWNER TO postgres; + +-- +-- Name: iatiActivity_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiActivity_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiActivity_id_seq" OWNER TO postgres; + +-- +-- Name: iatiActivity_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiActivity_id_seq" OWNED BY public."iatiActivity".id; + + +-- +-- Name: iatiFTSMap; Type: TABLE; Schema: public; Owner: postgres +-- + 
+CREATE TABLE public."iatiFTSMap" ( + id integer NOT NULL, + data json NOT NULL, + "oldData" json, + "sourceOrganizations" character varying(255)[], + "destinationOrganizations" character varying(255)[], + "recipientCountries" character varying(255)[], + updated boolean DEFAULT true NOT NULL, + added boolean DEFAULT false NOT NULL, + humanitarian boolean DEFAULT false, + active boolean DEFAULT true, + "iatiPublisherID" character varying(255), + "iatiActivityID" integer, + "flowID" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone +); + + +ALTER TABLE public."iatiFTSMap" OWNER TO postgres; + +-- +-- Name: iatiFTSMap_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiFTSMap_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiFTSMap_id_seq" OWNER TO postgres; + +-- +-- Name: iatiFTSMap_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiFTSMap_id_seq" OWNED BY public."iatiFTSMap".id; + + +-- +-- Name: iatiFTSMatch; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiFTSMatch" ( + id integer NOT NULL, + data json NOT NULL, + "iatiFTSMapID" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone +); + + +ALTER TABLE public."iatiFTSMatch" OWNER TO postgres; + +-- +-- Name: iatiFTSMatch_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiFTSMatch_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiFTSMatch_id_seq" OWNER TO postgres; + +-- +-- Name: iatiFTSMatch_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiFTSMatch_id_seq" OWNED BY public."iatiFTSMatch".id; + + +-- +-- Name: iatiHumanitarianScope; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiHumanitarianScope" ( + id integer NOT NULL, + "iatiIdentifier" character varying(255), + type character varying(255), + vocabulary character varying(255), + "vocabularyUrl" character varying(255), + code character varying(255), + "iatiActivityId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone +); + + +ALTER TABLE public."iatiHumanitarianScope" OWNER TO postgres; + +-- +-- Name: iatiHumanitarianScope_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiHumanitarianScope_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiHumanitarianScope_id_seq" OWNER TO postgres; + +-- +-- Name: iatiHumanitarianScope_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiHumanitarianScope_id_seq" OWNED BY public."iatiHumanitarianScope".id; + + +-- +-- Name: iatiParticipatingOrg; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiParticipatingOrg" ( + id integer NOT NULL, + "iatiIdentifier" character varying(255), + ref character varying(255), + type character varying(255), + role character varying(255), + name character varying(255), + "iatiActivityId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time 
zone +); + + +ALTER TABLE public."iatiParticipatingOrg" OWNER TO postgres; + +-- +-- Name: iatiParticipatingOrg_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiParticipatingOrg_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiParticipatingOrg_id_seq" OWNER TO postgres; + +-- +-- Name: iatiParticipatingOrg_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiParticipatingOrg_id_seq" OWNED BY public."iatiParticipatingOrg".id; + + +-- +-- Name: iatiPublisher; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiPublisher" ( + id character varying(255) NOT NULL, + active boolean DEFAULT false, + "queuedAt" timestamp with time zone, + "fetchStatus" public."enum_iatiPublisher_fetchStatus" DEFAULT 'halted'::public."enum_iatiPublisher_fetchStatus", + "lastFetchedAt" timestamp with time zone, + name character varying(255) NOT NULL, + country character varying(255), + "organizationType" character varying(255), + datasets integer, + "xmlData" text, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone, + "deletedAt" timestamp with time zone +); + + +ALTER TABLE public."iatiPublisher" OWNER TO postgres; + +-- +-- Name: iatiRecipientCountry; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiRecipientCountry" ( + id integer NOT NULL, + "iatiIdentifier" character varying(255), + code character varying(255), + iso3 character varying(255), + percentage character varying(255), + text character varying(255), + "isCountry" boolean, + "iatiActivityId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone +); + + +ALTER TABLE public."iatiRecipientCountry" OWNER TO postgres; + +-- +-- Name: iatiRecipientCountry_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiRecipientCountry_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiRecipientCountry_id_seq" OWNER TO postgres; + +-- +-- Name: iatiRecipientCountry_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiRecipientCountry_id_seq" OWNED BY public."iatiRecipientCountry".id; + + +-- +-- Name: iatiSector; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiSector" ( + id integer NOT NULL, + "iatiIdentifier" character varying(255), + code character varying(255), + percentage character varying(255), + text text, + "iatiActivityId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone, + vocabulary character varying(255) +); + + +ALTER TABLE public."iatiSector" OWNER TO postgres; + +-- +-- Name: iatiSector_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiSector_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiSector_id_seq" OWNER TO postgres; + +-- +-- Name: iatiSector_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiSector_id_seq" OWNED BY public."iatiSector".id; + + +-- +-- Name: iatiTransaction; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."iatiTransaction" ( + id integer NOT NULL, + "iatiIdentifier" character 
varying(255), + ref text, + sector json, + date character varying(255), + type character varying(255), + currency character varying(255), + humanitarian boolean DEFAULT false, + value double precision, + "providerOrgRef" character varying(255), + "providerOrgName" character varying(255), + "receiverOrgRef" character varying(255), + "receiverOrgName" character varying(255), + "recipientCountry" json, + "iatiActivityId" integer, + "iatiFTSMapId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone, + description text +); + + +ALTER TABLE public."iatiTransaction" OWNER TO postgres; + +-- +-- Name: iatiTransaction_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."iatiTransaction_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."iatiTransaction_id_seq" OWNER TO postgres; + +-- +-- Name: iatiTransaction_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."iatiTransaction_id_seq" OWNED BY public."iatiTransaction".id; + + +-- +-- Name: icon; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.icon ( + id character varying(255) NOT NULL, + svg bytea, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.icon OWNER TO postgres; + +-- +-- Name: job; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.job ( + id integer NOT NULL, + "startAt" timestamp with time zone NOT NULL, + "endAt" timestamp with time zone, + status public.enum_job_status DEFAULT 'pending'::public.enum_job_status NOT NULL, + type public.enum_job_type NOT NULL, + metadata jsonb, + "totalTaskCount" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.job OWNER TO postgres; + +-- +-- Name: jobAssociation; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."jobAssociation" ( + "jobId" integer NOT NULL, + "objectId" integer NOT NULL, + "objectType" character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."jobAssociation" OWNER TO postgres; + +-- +-- Name: job_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.job_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.job_id_seq OWNER TO postgres; + +-- +-- Name: job_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.job_id_seq OWNED BY public.job.id; + + +-- +-- Name: legacy; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.legacy ( + "objectType" character varying(32) NOT NULL, + "objectID" integer NOT NULL, + "legacyID" integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.legacy OWNER TO postgres; + +-- +-- Name: location; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.location ( + id integer NOT NULL, + "externalId" character varying(255), + name character varying(255), + "adminLevel" integer, + latitude double precision, + longitude double precision, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "parentId" integer, + iso3 
character varying(3), + pcode character varying(255), + status character varying(255), + "validOn" bigint, + "itosSync" boolean DEFAULT true +); + + +ALTER TABLE public.location OWNER TO postgres; + +-- +-- Name: location_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.location_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.location_id_seq OWNER TO postgres; + +-- +-- Name: location_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.location_id_seq OWNED BY public.location.id; + + +-- +-- Name: measurement; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.measurement ( + id integer NOT NULL, + "attachmentId" integer, + "planReportingPeriodId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "deletedAt" timestamp with time zone, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[] +); + + +ALTER TABLE public.measurement OWNER TO postgres; + +-- +-- Name: measurementVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."measurementVersion" ( + id integer NOT NULL, + "measurementId" integer, + value jsonb, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[], + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."measurementVersion" OWNER TO postgres; + +-- +-- Name: measurementVersion_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."measurementVersion_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."measurementVersion_id_seq" OWNER TO postgres; + +-- +-- Name: measurementVersion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."measurementVersion_id_seq" OWNED BY public."measurementVersion".id; + + +-- +-- Name: measurement_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.measurement_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.measurement_id_seq OWNER TO postgres; + +-- +-- Name: measurement_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.measurement_id_seq OWNED BY public.measurement.id; + + +-- +-- Name: objectExclude; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."objectExclude" ( + "objectType" character varying(32) NOT NULL, + "objectID" integer NOT NULL, + module public."enum_objectExclude_module" NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."objectExclude" OWNER TO postgres; + +-- +-- Name: operation; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.operation ( + "createdAt" timestamp with time zone NOT NULL, + id integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.operation OWNER TO postgres; + +-- +-- Name: 
operationCluster; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."operationCluster" ( + "createdAt" timestamp with time zone NOT NULL, + id integer NOT NULL, + "operationId" integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."operationCluster" OWNER TO postgres; + +-- +-- Name: operationClusterVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."operationClusterVersion" ( + "createdAt" timestamp with time zone NOT NULL, + data jsonb NOT NULL, + "isLatest" boolean NOT NULL, + "modifiedBy" integer, + root integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + version integer NOT NULL +); + + +ALTER TABLE public."operationClusterVersion" OWNER TO postgres; + +-- +-- Name: operationCluster_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."operationCluster_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."operationCluster_id_seq" OWNER TO postgres; + +-- +-- Name: operationCluster_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."operationCluster_id_seq" OWNED BY public."operationCluster".id; + + +-- +-- Name: operationVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."operationVersion" ( + "createdAt" timestamp with time zone NOT NULL, + data jsonb NOT NULL, + "isLatest" boolean NOT NULL, + "modifiedBy" integer, + root integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + version integer NOT NULL +); + + +ALTER TABLE public."operationVersion" OWNER TO postgres; + +-- +-- Name: operation_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.operation_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.operation_id_seq OWNER TO postgres; + +-- +-- Name: operation_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.operation_id_seq OWNED BY public.operation.id; + + +-- +-- Name: organization; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.organization ( + id integer NOT NULL, + name character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + abbreviation character varying(255), + url text, + "parentID" integer, + "nativeName" character varying(255), + comments text, + "collectiveInd" boolean DEFAULT false NOT NULL, + active boolean DEFAULT true NOT NULL, + "deletedAt" timestamp with time zone, + "newOrganizationId" integer, + verified boolean DEFAULT false NOT NULL, + notes text +); + + +ALTER TABLE public.organization OWNER TO postgres; + +-- +-- Name: organizationLocation; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."organizationLocation" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "organizationId" integer NOT NULL, + "locationId" integer NOT NULL +); + + +ALTER TABLE public."organizationLocation" OWNER TO postgres; + +-- +-- Name: organization_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.organization_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.organization_id_seq OWNER TO postgres; + +-- +-- Name: organization_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE 
public.organization_id_seq OWNED BY public.organization.id; + + +-- +-- Name: parameterValueIndicatorGoal; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."parameterValueIndicatorGoal" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "indicatorGoalId" integer NOT NULL, + "parameterValueId" integer NOT NULL +); + + +ALTER TABLE public."parameterValueIndicatorGoal" OWNER TO postgres; + +-- +-- Name: participant; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.participant ( + id integer NOT NULL, + "hidId" character varying(255), + email character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "hidSub" character varying(255), + "internalUse" character varying(255), + name character varying(255) +); + + +ALTER TABLE public.participant OWNER TO postgres; + +-- +-- Name: participantCountry; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."participantCountry" ( + id integer NOT NULL, + "locationId" integer, + "participantId" integer, + validated boolean DEFAULT true, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."participantCountry" OWNER TO postgres; + +-- +-- Name: participantCountry_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."participantCountry_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."participantCountry_id_seq" OWNER TO postgres; + +-- +-- Name: participantCountry_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."participantCountry_id_seq" OWNED BY public."participantCountry".id; + + +-- +-- Name: participantOrganization; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."participantOrganization" ( + id integer NOT NULL, + "organizationId" integer, + "participantId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + validated boolean DEFAULT true +); + + +ALTER TABLE public."participantOrganization" OWNER TO postgres; + +-- +-- Name: participantOrganization_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."participantOrganization_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."participantOrganization_id_seq" OWNER TO postgres; + +-- +-- Name: participantOrganization_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."participantOrganization_id_seq" OWNED BY public."participantOrganization".id; + + +-- +-- Name: participantRole; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."participantRole" ( + id integer NOT NULL, + "roleId" integer, + "participantId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "objectId" integer, + "objectType" character varying(255) +); + + +ALTER TABLE public."participantRole" OWNER TO postgres; + +-- +-- Name: participantRole_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."participantRole_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."participantRole_id_seq" OWNER TO postgres; + +-- +-- Name: participantRole_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: 
postgres +-- + +ALTER SEQUENCE public."participantRole_id_seq" OWNED BY public."participantRole".id; + + +-- +-- Name: participant_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.participant_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.participant_id_seq OWNER TO postgres; + +-- +-- Name: participant_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.participant_id_seq OWNED BY public.participant.id; + + +-- +-- Name: permittedAction; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."permittedAction" ( + id character varying(255) NOT NULL, + value jsonb, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."permittedAction" OWNER TO postgres; + +-- +-- Name: plan; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.plan ( + id integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + restricted boolean, + "revisionState" character varying(255) +); + + +ALTER TABLE public.plan OWNER TO postgres; + +-- +-- Name: planBlueprint_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planBlueprint_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."planBlueprint_id_seq" OWNER TO postgres; + +-- +-- Name: planBlueprint_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planBlueprint_id_seq" OWNED BY public.blueprint.id; + + +-- +-- Name: planEmergency; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planEmergency" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "planId" integer NOT NULL, + "emergencyId" integer NOT NULL +); + + +ALTER TABLE public."planEmergency" OWNER TO postgres; + +-- +-- Name: planEntity; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planEntity" ( + id integer NOT NULL, + "planId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "entityPrototypeId" integer NOT NULL, + "deletedAt" timestamp with time zone, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[] +); + + +ALTER TABLE public."planEntity" OWNER TO postgres; + +-- +-- Name: planEntityVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planEntityVersion" ( + id integer NOT NULL, + "planEntityId" integer, + "customReference" character varying(255), + value json, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[], + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."planEntityVersion" OWNER TO postgres; + +-- +-- Name: planEntityVersion_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planEntityVersion_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + 
+ +ALTER TABLE public."planEntityVersion_id_seq" OWNER TO postgres; + +-- +-- Name: planEntityVersion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planEntityVersion_id_seq" OWNED BY public."planEntityVersion".id; + + +-- +-- Name: planEntity_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planEntity_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."planEntity_id_seq" OWNER TO postgres; + +-- +-- Name: planEntity_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planEntity_id_seq" OWNED BY public."planEntity".id; + + +-- +-- Name: planLocation; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planLocation" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "planId" integer NOT NULL, + "locationId" integer NOT NULL, + id integer NOT NULL, + "deletedAt" timestamp with time zone, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[] +); + + +ALTER TABLE public."planLocation" OWNER TO postgres; + +-- +-- Name: planLocation_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planLocation_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."planLocation_id_seq" OWNER TO postgres; + +-- +-- Name: planLocation_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planLocation_id_seq" OWNED BY public."planLocation".id; + + +-- +-- Name: planReportingPeriod; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planReportingPeriod" ( + id integer NOT NULL, + "startDate" date, + "endDate" date, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "expiryDate" date, + "periodNumber" integer, + "planId" integer, + "measurementsGenerated" boolean DEFAULT false +); + + +ALTER TABLE public."planReportingPeriod" OWNER TO postgres; + +-- +-- Name: planReportingPeriod_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planReportingPeriod_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."planReportingPeriod_id_seq" OWNER TO postgres; + +-- +-- Name: planReportingPeriod_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planReportingPeriod_id_seq" OWNED BY public."planReportingPeriod".id; + + +-- +-- Name: planTag; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planTag" ( + id integer NOT NULL, + "planId" integer NOT NULL, + name character varying(255) NOT NULL, + public boolean DEFAULT false NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + comment character varying(255), + "revisionState" character varying(255) +); + + +ALTER TABLE public."planTag" OWNER TO postgres; + +-- +-- Name: planTag_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planTag_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."planTag_id_seq" OWNER TO postgres; + +-- +-- Name: 
planTag_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planTag_id_seq" OWNED BY public."planTag".id; + + +-- +-- Name: planVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planVersion" ( + id integer NOT NULL, + "planId" integer, + name character varying(255), + "startDate" date, + "endDate" date, + comments text, + "isForHPCProjects" boolean DEFAULT false NOT NULL, + code character varying(255), + "customLocationCode" character varying(255), + "currentReportingPeriodId" integer, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[], + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "lastPublishedReportingPeriodId" integer, + "clusterSelectionType" character varying(255) +); + + +ALTER TABLE public."planVersion" OWNER TO postgres; + +-- +-- Name: planVersion_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planVersion_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."planVersion_id_seq" OWNER TO postgres; + +-- +-- Name: planVersion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planVersion_id_seq" OWNED BY public."planVersion".id; + + +-- +-- Name: planYear; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."planYear" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "planId" integer NOT NULL, + "usageYearId" integer NOT NULL, + id integer NOT NULL, + "deletedAt" timestamp with time zone, + "currentVersion" boolean DEFAULT false NOT NULL, + "latestVersion" boolean DEFAULT false NOT NULL, + "latestTaggedVersion" boolean DEFAULT false NOT NULL, + "versionTags" character varying(8)[] DEFAULT (ARRAY[]::character varying[])::character varying(8)[] +); + + +ALTER TABLE public."planYear" OWNER TO postgres; + +-- +-- Name: planYear_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."planYear_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."planYear_id_seq" OWNER TO postgres; + +-- +-- Name: planYear_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."planYear_id_seq" OWNED BY public."planYear".id; + + +-- +-- Name: plan_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.plan_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.plan_id_seq OWNER TO postgres; + +-- +-- Name: plan_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.plan_id_seq OWNED BY public.plan.id; + + +-- +-- Name: procedureEntityPrototype; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."procedureEntityPrototype" ( + id integer NOT NULL, + "planId" integer NOT NULL, + "entityPrototypeId" integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."procedureEntityPrototype" OWNER TO postgres; + +-- +-- Name: procedureEntityPrototype_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE 
public."procedureEntityPrototype_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."procedureEntityPrototype_id_seq" OWNER TO postgres; + +-- +-- Name: procedureEntityPrototype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."procedureEntityPrototype_id_seq" OWNED BY public."procedureEntityPrototype".id; + + +-- +-- Name: procedureSection; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."procedureSection" ( + id integer NOT NULL, + name character varying(255), + "order" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "planId" integer NOT NULL, + description json +); + + +ALTER TABLE public."procedureSection" OWNER TO postgres; + +-- +-- Name: procedureSectionField; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."procedureSectionField" ( + "procedureSectionId" integer NOT NULL, + "conditionFieldId" integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."procedureSectionField" OWNER TO postgres; + +-- +-- Name: procedureSection_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."procedureSection_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."procedureSection_id_seq" OWNER TO postgres; + +-- +-- Name: procedureSection_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."procedureSection_id_seq" OWNED BY public."procedureSection".id; + + +-- +-- Name: project; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.project ( + id integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + code character varying(255), + "currentPublishedVersionId" integer, + "creatorParticipantId" integer, + "latestVersionId" integer, + "implementationStatus" public."enum_project_implementationStatus", + pdf json +); + + +ALTER TABLE public.project OWNER TO postgres; + +-- +-- Name: projectContact; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectContact" ( + id integer NOT NULL, + "projectVersionId" integer NOT NULL, + name character varying(255) NOT NULL, + email character varying(255), + website character varying(255), + "phoneNumber" character varying(255), + "participantId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."projectContact" OWNER TO postgres; + +-- +-- Name: projectContact_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."projectContact_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."projectContact_id_seq" OWNER TO postgres; + +-- +-- Name: projectContact_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."projectContact_id_seq" OWNED BY public."projectContact".id; + + +-- +-- Name: projectVersionField; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionField" ( + id integer NOT NULL, + "conditionFieldId" integer, + value text, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "projectVersionPlanId" integer +); + + +ALTER TABLE public."projectVersionField" OWNER TO 
postgres; + +-- +-- Name: projectField_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."projectField_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."projectField_id_seq" OWNER TO postgres; + +-- +-- Name: projectField_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."projectField_id_seq" OWNED BY public."projectVersionField".id; + + +-- +-- Name: projectGlobalClusters; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectGlobalClusters" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "globalClusterId" integer NOT NULL, + "projectVersionId" integer NOT NULL +); + + +ALTER TABLE public."projectGlobalClusters" OWNER TO postgres; + +-- +-- Name: projectLocations; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectLocations" ( + "projectVersionId" integer NOT NULL, + "locationId" integer NOT NULL, + "createdAt" timestamp with time zone, + "updatedAt" timestamp with time zone +); + + +ALTER TABLE public."projectLocations" OWNER TO postgres; + +-- +-- Name: projectVersionAttachment; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionAttachment" ( + "projectVersionId" integer NOT NULL, + "attachmentId" integer NOT NULL, + value jsonb, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + total double precision, + "attachmentVersionId" integer +); + + +ALTER TABLE public."projectVersionAttachment" OWNER TO postgres; + +-- +-- Name: projectVersionGoverningEntity; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionGoverningEntity" ( + "projectVersionId" integer NOT NULL, + "governingEntityId" integer NOT NULL, + "createdAt" timestamp with time zone, + "updatedAt" timestamp with time zone, + "governingEntityVersionId" integer +); + + +ALTER TABLE public."projectVersionGoverningEntity" OWNER TO postgres; + +-- +-- Name: projectVersionHistory; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionHistory" ( + id integer NOT NULL, + "projectVersionId" integer NOT NULL, + "participantId" integer NOT NULL, + value json, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."projectVersionHistory" OWNER TO postgres; + +-- +-- Name: projectVersionHistory_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."projectVersionHistory_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."projectVersionHistory_id_seq" OWNER TO postgres; + +-- +-- Name: projectVersionHistory_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."projectVersionHistory_id_seq" OWNED BY public."projectVersionHistory".id; + + +-- +-- Name: projectVersionOrganization; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionOrganization" ( + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "organizationId" integer NOT NULL, + "projectVersionId" integer NOT NULL +); + + +ALTER TABLE public."projectVersionOrganization" OWNER TO postgres; + +-- +-- Name: projectVersionPlan; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionPlan" ( + 
"projectVersionId" integer NOT NULL, + "planId" integer NOT NULL, + "createdAt" timestamp with time zone, + "updatedAt" timestamp with time zone, + id integer NOT NULL, + value jsonb DEFAULT '{}'::jsonb NOT NULL, + "workflowStatusOptionId" integer +); + + +ALTER TABLE public."projectVersionPlan" OWNER TO postgres; + +-- +-- Name: projectVersionPlanEntity; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."projectVersionPlanEntity" ( + "projectVersionId" integer NOT NULL, + "planEntityId" integer NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "planEntityVersionId" integer +); + + +ALTER TABLE public."projectVersionPlanEntity" OWNER TO postgres; + +-- +-- Name: projectVersionPlan_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."projectVersionPlan_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."projectVersionPlan_id_seq" OWNER TO postgres; + +-- +-- Name: projectVersionPlan_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."projectVersionPlan_id_seq" OWNED BY public."projectVersionPlan".id; + + +-- +-- Name: projectVersion_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."projectVersion_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."projectVersion_id_seq" OWNER TO postgres; + +-- +-- Name: projectVersion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."projectVersion_id_seq" OWNED BY public."projectVersion".id; + + +-- +-- Name: project_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.project_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.project_id_seq OWNER TO postgres; + +-- +-- Name: project_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.project_id_seq OWNED BY public.project.id; + + +-- +-- Name: reportDetail; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."reportDetail" ( + id integer NOT NULL, + "flowID" integer NOT NULL, + "versionID" integer NOT NULL, + "contactInfo" text, + source character varying(255) NOT NULL, + date timestamp with time zone, + "sourceID" character varying(255), + "refCode" character varying(255), + verified boolean NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "organizationID" integer +); + + +ALTER TABLE public."reportDetail" OWNER TO postgres; + +-- +-- Name: reportDetail_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."reportDetail_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."reportDetail_id_seq" OWNER TO postgres; + +-- +-- Name: reportDetail_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."reportDetail_id_seq" OWNED BY public."reportDetail".id; + + +-- +-- Name: reportFile; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."reportFile" ( + id integer NOT NULL, + "reportID" integer, + title character varying(255) NOT NULL, + type character varying(255) NOT NULL, + url character varying(255), + "fileAssetID" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + 
+ +ALTER TABLE public."reportFile" OWNER TO postgres; + +-- +-- Name: reportFile_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."reportFile_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."reportFile_id_seq" OWNER TO postgres; + +-- +-- Name: reportFile_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."reportFile_id_seq" OWNED BY public."reportFile".id; + + +-- +-- Name: reportingWindow; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."reportingWindow" ( + "belongsToId" integer, + "belongsToType" public."enum_reportingWindow_belongsToType" NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + id integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."reportingWindow" OWNER TO postgres; + +-- +-- Name: reportingWindowAssignment; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."reportingWindowAssignment" ( + "assigneeId" integer NOT NULL, + "assigneeOperation" integer NOT NULL, + "assigneeType" public."enum_reportingWindowAssignment_assigneeType" NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + id integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "reportingWindowId" integer NOT NULL +); + + +ALTER TABLE public."reportingWindowAssignment" OWNER TO postgres; + +-- +-- Name: reportingWindowAssignmentVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."reportingWindowAssignmentVersion" ( + "createdAt" timestamp with time zone NOT NULL, + data jsonb NOT NULL, + "isLatest" boolean NOT NULL, + "modifiedBy" integer, + root integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + version integer NOT NULL +); + + +ALTER TABLE public."reportingWindowAssignmentVersion" OWNER TO postgres; + +-- +-- Name: reportingWindowAssignment_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."reportingWindowAssignment_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."reportingWindowAssignment_id_seq" OWNER TO postgres; + +-- +-- Name: reportingWindowAssignment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."reportingWindowAssignment_id_seq" OWNED BY public."reportingWindowAssignment".id; + + +-- +-- Name: reportingWindowVersion; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."reportingWindowVersion" ( + "createdAt" timestamp with time zone NOT NULL, + data jsonb NOT NULL, + "isLatest" boolean NOT NULL, + "modifiedBy" integer, + root integer NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + version integer NOT NULL +); + + +ALTER TABLE public."reportingWindowVersion" OWNER TO postgres; + +-- +-- Name: reportingWindow_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."reportingWindow_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."reportingWindow_id_seq" OWNER TO postgres; + +-- +-- Name: reportingWindow_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."reportingWindow_id_seq" OWNED BY public."reportingWindow".id; + + +-- +-- Name: role; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.role ( + id integer NOT NULL, + name character varying(255), + description character varying(255), + 
"createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "targetTypes" jsonb +); + + +ALTER TABLE public.role OWNER TO postgres; + +-- +-- Name: roleAuthenticationKey; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."roleAuthenticationKey" ( + id integer NOT NULL, + "roleId" integer, + key character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."roleAuthenticationKey" OWNER TO postgres; + +-- +-- Name: roleAuthenticationKey_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."roleAuthenticationKey_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."roleAuthenticationKey_id_seq" OWNER TO postgres; + +-- +-- Name: roleAuthenticationKey_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."roleAuthenticationKey_id_seq" OWNED BY public."roleAuthenticationKey".id; + + +-- +-- Name: rolePermittedAction; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."rolePermittedAction" ( + id integer NOT NULL, + "roleId" integer, + "permittedActionId" character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."rolePermittedAction" OWNER TO postgres; + +-- +-- Name: rolePermittedAction_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."rolePermittedAction_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."rolePermittedAction_id_seq" OWNER TO postgres; + +-- +-- Name: rolePermittedAction_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."rolePermittedAction_id_seq" OWNED BY public."rolePermittedAction".id; + + +-- +-- Name: role_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.role_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.role_id_seq OWNER TO postgres; + +-- +-- Name: role_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.role_id_seq OWNED BY public.role.id; + + +-- +-- Name: tag; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.tag ( + id integer NOT NULL, + name character varying(255) NOT NULL, + "createdAt" timestamp with time zone DEFAULT now() NOT NULL, + "updatedAt" timestamp with time zone DEFAULT now() NOT NULL +); + + +ALTER TABLE public.tag OWNER TO postgres; + +-- +-- Name: tag_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.tag_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.tag_id_seq OWNER TO postgres; + +-- +-- Name: tag_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.tag_id_seq OWNED BY public.tag.id; + + +-- +-- Name: task; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.task ( + id integer NOT NULL, + command character varying(255) NOT NULL, + requester integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "dataFileHash" character varying(255) NOT NULL, + run boolean DEFAULT false NOT NULL +); + + +ALTER TABLE public.task OWNER TO postgres; + +-- +-- Name: task_id_seq; Type: SEQUENCE; 
Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.task_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.task_id_seq OWNER TO postgres; + +-- +-- Name: task_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.task_id_seq OWNED BY public.task.id; + + +-- +-- Name: unit; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.unit ( + id integer NOT NULL, + name character varying(255), + label character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + "unitTypeId" integer +); + + +ALTER TABLE public.unit OWNER TO postgres; + +-- +-- Name: unitType; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."unitType" ( + id integer NOT NULL, + name character varying(255), + label character varying(255), + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."unitType" OWNER TO postgres; + +-- +-- Name: unitType_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."unitType_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."unitType_id_seq" OWNER TO postgres; + +-- +-- Name: unitType_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."unitType_id_seq" OWNED BY public."unitType".id; + + +-- +-- Name: unit_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.unit_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.unit_id_seq OWNER TO postgres; + +-- +-- Name: unit_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.unit_id_seq OWNED BY public.unit.id; + + +-- +-- Name: usageYear; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."usageYear" ( + id integer NOT NULL, + year character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."usageYear" OWNER TO postgres; + +-- +-- Name: usageYear_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."usageYear_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."usageYear_id_seq" OWNER TO postgres; + +-- +-- Name: usageYear_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."usageYear_id_seq" OWNED BY public."usageYear".id; + + +-- +-- Name: workflowRole; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."workflowRole" ( + id integer NOT NULL, + "roleId" integer, + "entityType" character varying(255), + "entityId" integer, + "permittedActionIds" jsonb, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL +); + + +ALTER TABLE public."workflowRole" OWNER TO postgres; + +-- +-- Name: workflowRole_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."workflowRole_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."workflowRole_id_seq" OWNER TO postgres; + +-- +-- Name: workflowRole_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."workflowRole_id_seq" OWNED BY public."workflowRole".id; + + +-- +-- 
Name: workflowStatusOption; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."workflowStatusOption" ( + id integer NOT NULL, + type character varying(255) NOT NULL, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + value jsonb, + "planId" integer NOT NULL +); + + +ALTER TABLE public."workflowStatusOption" OWNER TO postgres; + +-- +-- Name: workflowStatusOptionStep; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public."workflowStatusOptionStep" ( + id integer NOT NULL, + "fromId" integer, + "toId" integer, + "createdAt" timestamp with time zone NOT NULL, + "updatedAt" timestamp with time zone NOT NULL, + value jsonb +); + + +ALTER TABLE public."workflowStatusOptionStep" OWNER TO postgres; + +-- +-- Name: workflowStatusOptionStep_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."workflowStatusOptionStep_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."workflowStatusOptionStep_id_seq" OWNER TO postgres; + +-- +-- Name: workflowStatusOptionStep_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."workflowStatusOptionStep_id_seq" OWNED BY public."workflowStatusOptionStep".id; + + +-- +-- Name: workflowStatusOption_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public."workflowStatusOption_id_seq" + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public."workflowStatusOption_id_seq" OWNER TO postgres; + +-- +-- Name: workflowStatusOption_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public."workflowStatusOption_id_seq" OWNED BY public."workflowStatusOption".id; + + +-- +-- Name: attachment id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.attachment ALTER COLUMN id SET DEFAULT nextval('public.attachment_id_seq'::regclass); + + +-- +-- Name: attachmentPrototype id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."attachmentPrototype" ALTER COLUMN id SET DEFAULT nextval('public."attachmentPrototype_id_seq"'::regclass); + + +-- +-- Name: attachmentVersion id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."attachmentVersion" ALTER COLUMN id SET DEFAULT nextval('public."attachmentVersion_id_seq"'::regclass); + + +-- +-- Name: authGrantLog id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrantLog" ALTER COLUMN id SET DEFAULT nextval('public."authGrantLog_id_seq"'::regclass); + + +-- +-- Name: authGrantee id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrantee" ALTER COLUMN id SET DEFAULT nextval('public."authGrantee_id_seq"'::regclass); + + +-- +-- Name: authTarget id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authTarget" ALTER COLUMN id SET DEFAULT nextval('public."authTarget_id_seq"'::regclass); + + +-- +-- Name: blueprint id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blueprint ALTER COLUMN id SET DEFAULT nextval('public."planBlueprint_id_seq"'::regclass); + + +-- +-- Name: budgetSegment id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegment" ALTER COLUMN id SET DEFAULT nextval('public."budgetSegment_id_seq"'::regclass); + + +-- +-- Name: budgetSegmentBreakdown id; Type: DEFAULT; Schema: 
public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegmentBreakdown" ALTER COLUMN id SET DEFAULT nextval('public."budgetSegmentBreakdown_id_seq"'::regclass); + + +-- +-- Name: budgetSegmentBreakdownEntity id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegmentBreakdownEntity" ALTER COLUMN id SET DEFAULT nextval('public."budgetSegmentBreakdownEntity_id_seq"'::regclass); + + +-- +-- Name: category id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.category ALTER COLUMN id SET DEFAULT nextval('public.category_id_seq'::regclass); + + +-- +-- Name: client id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.client ALTER COLUMN id SET DEFAULT nextval('public.client_id_seq'::regclass); + + +-- +-- Name: conditionField id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionField" ALTER COLUMN id SET DEFAULT nextval('public."conditionField_id_seq"'::regclass); + + +-- +-- Name: currency id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.currency ALTER COLUMN id SET DEFAULT nextval('public.currency_id_seq'::regclass); + + +-- +-- Name: disaggregationCategory id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationCategory" ALTER COLUMN id SET DEFAULT nextval('public."disaggregationCategory_id_seq"'::regclass); + + +-- +-- Name: disaggregationCategoryGroup id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationCategoryGroup" ALTER COLUMN id SET DEFAULT nextval('public."disaggregationCategoryGroup_id_seq"'::regclass); + + +-- +-- Name: disaggregationModel id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationModel" ALTER COLUMN id SET DEFAULT nextval('public."disaggregationModel_id_seq"'::regclass); + + +-- +-- Name: emergency id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.emergency ALTER COLUMN id SET DEFAULT nextval('public.emergency_id_seq'::regclass); + + +-- +-- Name: endpointLog id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."endpointLog" ALTER COLUMN id SET DEFAULT nextval('public."endpointLog_id_seq"'::regclass); + + +-- +-- Name: entityCategories id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entityCategories" ALTER COLUMN id SET DEFAULT nextval('public."entityCategories_id_seq"'::regclass); + + +-- +-- Name: entityCategory id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entityCategory" ALTER COLUMN id SET DEFAULT nextval('public."entityCategory_id_seq"'::regclass); + + +-- +-- Name: entityPrototype id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entityPrototype" ALTER COLUMN id SET DEFAULT nextval('public."entityPrototype_id_seq"'::regclass); + + +-- +-- Name: externalData id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."externalData" ALTER COLUMN id SET DEFAULT nextval('public."externalData_id_seq"'::regclass); + + +-- +-- Name: externalReference id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."externalReference" ALTER COLUMN id SET DEFAULT nextval('public."externalReference_id_seq"'::regclass); + + +-- +-- Name: fileAssetEntity id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."fileAssetEntity" ALTER COLUMN id SET 
DEFAULT nextval('public."fileAssetEntity_id_seq"'::regclass); + + +-- +-- Name: flow id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.flow ALTER COLUMN id SET DEFAULT nextval('public.flow_id_seq'::regclass); + + +-- +-- Name: form id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.form ALTER COLUMN id SET DEFAULT nextval('public.form_id_seq'::regclass); + + +-- +-- Name: globalCluster id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalCluster" ALTER COLUMN id SET DEFAULT nextval('public."globalCluster_id_seq"'::regclass); + + +-- +-- Name: globalClusterAssociation id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalClusterAssociation" ALTER COLUMN id SET DEFAULT nextval('public."globalClusterAssociation_id_seq"'::regclass); + + +-- +-- Name: globalIndicator id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalIndicator" ALTER COLUMN id SET DEFAULT nextval('public."globalIndicator_id_seq"'::regclass); + + +-- +-- Name: governingEntity id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."governingEntity" ALTER COLUMN id SET DEFAULT nextval('public."governingEntity_id_seq"'::regclass); + + +-- +-- Name: governingEntityVersion id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."governingEntityVersion" ALTER COLUMN id SET DEFAULT nextval('public."governingEntityVersion_id_seq"'::regclass); + + +-- +-- Name: iatiActivity id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiActivity" ALTER COLUMN id SET DEFAULT nextval('public."iatiActivity_id_seq"'::regclass); + + +-- +-- Name: iatiFTSMap id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiFTSMap" ALTER COLUMN id SET DEFAULT nextval('public."iatiFTSMap_id_seq"'::regclass); + + +-- +-- Name: iatiFTSMatch id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiFTSMatch" ALTER COLUMN id SET DEFAULT nextval('public."iatiFTSMatch_id_seq"'::regclass); + + +-- +-- Name: iatiHumanitarianScope id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiHumanitarianScope" ALTER COLUMN id SET DEFAULT nextval('public."iatiHumanitarianScope_id_seq"'::regclass); + + +-- +-- Name: iatiParticipatingOrg id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiParticipatingOrg" ALTER COLUMN id SET DEFAULT nextval('public."iatiParticipatingOrg_id_seq"'::regclass); + + +-- +-- Name: iatiRecipientCountry id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiRecipientCountry" ALTER COLUMN id SET DEFAULT nextval('public."iatiRecipientCountry_id_seq"'::regclass); + + +-- +-- Name: iatiSector id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiSector" ALTER COLUMN id SET DEFAULT nextval('public."iatiSector_id_seq"'::regclass); + + +-- +-- Name: iatiTransaction id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiTransaction" ALTER COLUMN id SET DEFAULT nextval('public."iatiTransaction_id_seq"'::regclass); + + +-- +-- Name: job id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.job ALTER COLUMN id SET DEFAULT nextval('public.job_id_seq'::regclass); + + +-- +-- Name: location id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public.location ALTER COLUMN id SET DEFAULT nextval('public.location_id_seq'::regclass); + + +-- +-- Name: measurement id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.measurement ALTER COLUMN id SET DEFAULT nextval('public.measurement_id_seq'::regclass); + + +-- +-- Name: measurementVersion id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."measurementVersion" ALTER COLUMN id SET DEFAULT nextval('public."measurementVersion_id_seq"'::regclass); + + +-- +-- Name: operation id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.operation ALTER COLUMN id SET DEFAULT nextval('public.operation_id_seq'::regclass); + + +-- +-- Name: operationCluster id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationCluster" ALTER COLUMN id SET DEFAULT nextval('public."operationCluster_id_seq"'::regclass); + + +-- +-- Name: organization id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.organization ALTER COLUMN id SET DEFAULT nextval('public.organization_id_seq'::regclass); + + +-- +-- Name: participant id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.participant ALTER COLUMN id SET DEFAULT nextval('public.participant_id_seq'::regclass); + + +-- +-- Name: participantCountry id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantCountry" ALTER COLUMN id SET DEFAULT nextval('public."participantCountry_id_seq"'::regclass); + + +-- +-- Name: participantOrganization id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantOrganization" ALTER COLUMN id SET DEFAULT nextval('public."participantOrganization_id_seq"'::regclass); + + +-- +-- Name: participantRole id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantRole" ALTER COLUMN id SET DEFAULT nextval('public."participantRole_id_seq"'::regclass); + + +-- +-- Name: plan id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.plan ALTER COLUMN id SET DEFAULT nextval('public.plan_id_seq'::regclass); + + +-- +-- Name: planEntity id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEntity" ALTER COLUMN id SET DEFAULT nextval('public."planEntity_id_seq"'::regclass); + + +-- +-- Name: planEntityVersion id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEntityVersion" ALTER COLUMN id SET DEFAULT nextval('public."planEntityVersion_id_seq"'::regclass); + + +-- +-- Name: planLocation id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planLocation" ALTER COLUMN id SET DEFAULT nextval('public."planLocation_id_seq"'::regclass); + + +-- +-- Name: planReportingPeriod id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planReportingPeriod" ALTER COLUMN id SET DEFAULT nextval('public."planReportingPeriod_id_seq"'::regclass); + + +-- +-- Name: planTag id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planTag" ALTER COLUMN id SET DEFAULT nextval('public."planTag_id_seq"'::regclass); + + +-- +-- Name: planVersion id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planVersion" ALTER COLUMN id SET DEFAULT nextval('public."planVersion_id_seq"'::regclass); + + +-- +-- Name: planYear id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public."planYear" ALTER COLUMN id SET DEFAULT nextval('public."planYear_id_seq"'::regclass); + + +-- +-- Name: procedureEntityPrototype id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureEntityPrototype" ALTER COLUMN id SET DEFAULT nextval('public."procedureEntityPrototype_id_seq"'::regclass); + + +-- +-- Name: procedureSection id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureSection" ALTER COLUMN id SET DEFAULT nextval('public."procedureSection_id_seq"'::regclass); + + +-- +-- Name: project id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.project ALTER COLUMN id SET DEFAULT nextval('public.project_id_seq'::regclass); + + +-- +-- Name: projectContact id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectContact" ALTER COLUMN id SET DEFAULT nextval('public."projectContact_id_seq"'::regclass); + + +-- +-- Name: projectVersion id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersion" ALTER COLUMN id SET DEFAULT nextval('public."projectVersion_id_seq"'::regclass); + + +-- +-- Name: projectVersionComment id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionComment" ALTER COLUMN id SET DEFAULT nextval('public."fulfillmentComment_id_seq"'::regclass); + + +-- +-- Name: projectVersionField id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionField" ALTER COLUMN id SET DEFAULT nextval('public."projectField_id_seq"'::regclass); + + +-- +-- Name: projectVersionHistory id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionHistory" ALTER COLUMN id SET DEFAULT nextval('public."projectVersionHistory_id_seq"'::regclass); + + +-- +-- Name: projectVersionPlan id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlan" ALTER COLUMN id SET DEFAULT nextval('public."projectVersionPlan_id_seq"'::regclass); + + +-- +-- Name: reportDetail id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportDetail" ALTER COLUMN id SET DEFAULT nextval('public."reportDetail_id_seq"'::regclass); + + +-- +-- Name: reportFile id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportFile" ALTER COLUMN id SET DEFAULT nextval('public."reportFile_id_seq"'::regclass); + + +-- +-- Name: reportingWindow id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindow" ALTER COLUMN id SET DEFAULT nextval('public."reportingWindow_id_seq"'::regclass); + + +-- +-- Name: reportingWindowAssignment id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowAssignment" ALTER COLUMN id SET DEFAULT nextval('public."reportingWindowAssignment_id_seq"'::regclass); + + +-- +-- Name: role id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.role ALTER COLUMN id SET DEFAULT nextval('public.role_id_seq'::regclass); + + +-- +-- Name: roleAuthenticationKey id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."roleAuthenticationKey" ALTER COLUMN id SET DEFAULT nextval('public."roleAuthenticationKey_id_seq"'::regclass); + + +-- +-- Name: rolePermittedAction id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."rolePermittedAction" ALTER COLUMN id SET DEFAULT 
nextval('public."rolePermittedAction_id_seq"'::regclass); + + +-- +-- Name: tag id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tag ALTER COLUMN id SET DEFAULT nextval('public.tag_id_seq'::regclass); + + +-- +-- Name: task id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.task ALTER COLUMN id SET DEFAULT nextval('public.task_id_seq'::regclass); + + +-- +-- Name: unit id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.unit ALTER COLUMN id SET DEFAULT nextval('public.unit_id_seq'::regclass); + + +-- +-- Name: unitType id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."unitType" ALTER COLUMN id SET DEFAULT nextval('public."unitType_id_seq"'::regclass); + + +-- +-- Name: usageYear id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."usageYear" ALTER COLUMN id SET DEFAULT nextval('public."usageYear_id_seq"'::regclass); + + +-- +-- Name: workflowRole id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowRole" ALTER COLUMN id SET DEFAULT nextval('public."workflowRole_id_seq"'::regclass); + + +-- +-- Name: workflowStatusOption id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOption" ALTER COLUMN id SET DEFAULT nextval('public."workflowStatusOption_id_seq"'::regclass); + + +-- +-- Name: workflowStatusOptionStep id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOptionStep" ALTER COLUMN id SET DEFAULT nextval('public."workflowStatusOptionStep_id_seq"'::regclass); + + +-- +-- Name: SequelizeMeta SequelizeMeta_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."SequelizeMeta" + ADD CONSTRAINT "SequelizeMeta_pkey" PRIMARY KEY (name); + + +-- +-- Name: attachmentPrototype attachmentPrototype_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."attachmentPrototype" + ADD CONSTRAINT "attachmentPrototype_pkey" PRIMARY KEY (id); + + +-- +-- Name: attachmentVersion attachmentVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."attachmentVersion" + ADD CONSTRAINT "attachmentVersion_pkey" PRIMARY KEY (id); + + +-- +-- Name: attachment attachment_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.attachment + ADD CONSTRAINT attachment_pkey PRIMARY KEY (id); + + +-- +-- Name: authGrantLog authGrantLog_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrantLog" + ADD CONSTRAINT "authGrantLog_pkey" PRIMARY KEY (id); + + +-- +-- Name: authGrant authGrant_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrant" + ADD CONSTRAINT "authGrant_pkey" PRIMARY KEY (target, grantee); + + +-- +-- Name: authGrantee authGrantee_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrantee" + ADD CONSTRAINT "authGrantee_pkey" PRIMARY KEY (id); + + +-- +-- Name: authInvite authInvite_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authInvite" + ADD CONSTRAINT "authInvite_pkey" PRIMARY KEY (target, email); + + +-- +-- Name: authTarget authTarget_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authTarget" + ADD CONSTRAINT "authTarget_pkey" PRIMARY KEY (id); + + +-- +-- Name: authToken authToken_pkey; 
Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authToken" + ADD CONSTRAINT "authToken_pkey" PRIMARY KEY ("tokenHash"); + + +-- +-- Name: budgetSegmentBreakdownEntity budgetSegmentBreakdownEntity_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegmentBreakdownEntity" + ADD CONSTRAINT "budgetSegmentBreakdownEntity_pkey" PRIMARY KEY (id); + + +-- +-- Name: budgetSegmentBreakdown budgetSegmentBreakdown_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegmentBreakdown" + ADD CONSTRAINT "budgetSegmentBreakdown_pkey" PRIMARY KEY (id); + + +-- +-- Name: budgetSegment budgetSegment_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegment" + ADD CONSTRAINT "budgetSegment_pkey" PRIMARY KEY (id); + + +-- +-- Name: cache cache_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.cache + ADD CONSTRAINT cache_pkey PRIMARY KEY (namespace, fingerprint); + + +-- +-- Name: categoryGroup categoryGroup_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."categoryGroup" + ADD CONSTRAINT "categoryGroup_name_key" UNIQUE (name); + + +-- +-- Name: categoryGroup categoryGroup_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."categoryGroup" + ADD CONSTRAINT "categoryGroup_pkey" PRIMARY KEY (type); + + +-- +-- Name: categoryRef categoryRef_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."categoryRef" + ADD CONSTRAINT "categoryRef_pkey" PRIMARY KEY ("objectID", "versionID", "objectType", "categoryID"); + + +-- +-- Name: category category_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.category + ADD CONSTRAINT category_pkey PRIMARY KEY (id); + + +-- +-- Name: client client_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.client + ADD CONSTRAINT client_pkey PRIMARY KEY (id); + + +-- +-- Name: conditionFieldReliesOn conditionFieldReliesOn_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionFieldReliesOn" + ADD CONSTRAINT "conditionFieldReliesOn_pkey" PRIMARY KEY ("reliedOnById", "reliesOnId"); + + +-- +-- Name: conditionFieldType conditionFieldType_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionFieldType" + ADD CONSTRAINT "conditionFieldType_pkey" PRIMARY KEY (type); + + +-- +-- Name: conditionField conditionField_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionField" + ADD CONSTRAINT "conditionField_pkey" PRIMARY KEY (id); + + +-- +-- Name: currency currency_code_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.currency + ADD CONSTRAINT currency_code_key UNIQUE (code); + + +-- +-- Name: currency currency_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.currency + ADD CONSTRAINT currency_pkey PRIMARY KEY (id); + + +-- +-- Name: disaggregationCategoryGroup disaggregationCategoryGroup_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationCategoryGroup" + ADD CONSTRAINT "disaggregationCategoryGroup_id_key" UNIQUE (id); + + +-- +-- Name: disaggregationCategoryGroup disaggregationCategoryGroup_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public."disaggregationCategoryGroup" + ADD CONSTRAINT "disaggregationCategoryGroup_pkey" PRIMARY KEY (id); + + +-- +-- Name: disaggregationCategory disaggregationCategory_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationCategory" + ADD CONSTRAINT "disaggregationCategory_pkey" PRIMARY KEY (id); + + +-- +-- Name: disaggregationModel disaggregationModel_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationModel" + ADD CONSTRAINT "disaggregationModel_pkey" PRIMARY KEY (id); + + +-- +-- Name: emergencyLocation emergencyLocation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."emergencyLocation" + ADD CONSTRAINT "emergencyLocation_pkey" PRIMARY KEY ("emergencyId", "locationId"); + + +-- +-- Name: emergency emergency_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.emergency + ADD CONSTRAINT emergency_pkey PRIMARY KEY (id); + + +-- +-- Name: endpointLog endpointLog_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."endpointLog" + ADD CONSTRAINT "endpointLog_pkey" PRIMARY KEY (id); + + +-- +-- Name: endpointTrace endpointTrace_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."endpointTrace" + ADD CONSTRAINT "endpointTrace_pkey" PRIMARY KEY (id); + + +-- +-- Name: endpointUsage endpointUsage_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."endpointUsage" + ADD CONSTRAINT "endpointUsage_pkey" PRIMARY KEY (path, method); + + +-- +-- Name: entitiesAssociation entitiesAssociation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entitiesAssociation" + ADD CONSTRAINT "entitiesAssociation_pkey" PRIMARY KEY ("parentId", "childId"); + + +-- +-- Name: entityCategories entityCategories_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entityCategories" + ADD CONSTRAINT "entityCategories_pkey" PRIMARY KEY (id); + + +-- +-- Name: entityCategory entityCategory_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entityCategory" + ADD CONSTRAINT "entityCategory_pkey" PRIMARY KEY (id); + + +-- +-- Name: entityPrototype entityPrototype_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entityPrototype" + ADD CONSTRAINT "entityPrototype_pkey" PRIMARY KEY (id); + + +-- +-- Name: externalData externalData_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."externalData" + ADD CONSTRAINT "externalData_pkey" PRIMARY KEY (id); + + +-- +-- Name: externalReference externalReference_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."externalReference" + ADD CONSTRAINT "externalReference_pkey" PRIMARY KEY (id); + + +-- +-- Name: fileAssetEntity fileAssetEntity_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."fileAssetEntity" + ADD CONSTRAINT "fileAssetEntity_pkey" PRIMARY KEY (id); + + +-- +-- Name: fileRecord fileRecord_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."fileRecord" + ADD CONSTRAINT "fileRecord_pkey" PRIMARY KEY (namespace, hash); + + +-- +-- Name: flowLink flowLink_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."flowLink" + ADD CONSTRAINT "flowLink_pkey" PRIMARY KEY ("parentID", 
"childID"); + + +-- +-- Name: flowObjectType flowObjectType_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."flowObjectType" + ADD CONSTRAINT "flowObjectType_pkey" PRIMARY KEY (type); + + +-- +-- Name: flowObject flowObject_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."flowObject" + ADD CONSTRAINT "flowObject_pkey" PRIMARY KEY ("flowID", "objectID", "versionID", "objectType", "refDirection"); + + +-- +-- Name: flow flow_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.flow + ADD CONSTRAINT flow_pkey PRIMARY KEY (id, "versionID"); + + +-- +-- Name: formVersion formVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."formVersion" + ADD CONSTRAINT "formVersion_pkey" PRIMARY KEY (root, version); + + +-- +-- Name: form form_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.form + ADD CONSTRAINT form_pkey PRIMARY KEY (id); + + +-- +-- Name: projectVersionComment fulfillmentComment_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionComment" + ADD CONSTRAINT "fulfillmentComment_pkey" PRIMARY KEY (id); + + +-- +-- Name: globalClusterAssociation globalClusterAssociation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalClusterAssociation" + ADD CONSTRAINT "globalClusterAssociation_pkey" PRIMARY KEY (id); + + +-- +-- Name: globalCluster globalCluster_hrinfoId_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalCluster" + ADD CONSTRAINT "globalCluster_hrinfoId_key" UNIQUE ("hrinfoId"); + + +-- +-- Name: globalCluster globalCluster_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalCluster" + ADD CONSTRAINT "globalCluster_pkey" PRIMARY KEY (id); + + +-- +-- Name: globalIndicator globalIndicator_hrinfoId_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalIndicator" + ADD CONSTRAINT "globalIndicator_hrinfoId_key" UNIQUE ("hrinfoId"); + + +-- +-- Name: globalIndicator globalIndicator_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalIndicator" + ADD CONSTRAINT "globalIndicator_pkey" PRIMARY KEY (id); + + +-- +-- Name: governingEntityVersion governingEntityVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."governingEntityVersion" + ADD CONSTRAINT "governingEntityVersion_pkey" PRIMARY KEY (id); + + +-- +-- Name: governingEntity governingEntity_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."governingEntity" + ADD CONSTRAINT "governingEntity_pkey" PRIMARY KEY (id); + + +-- +-- Name: highWater highWater_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."highWater" + ADD CONSTRAINT "highWater_pkey" PRIMARY KEY ("jobName"); + + +-- +-- Name: iatiActivity iatiActivity_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiActivity" + ADD CONSTRAINT "iatiActivity_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiFTSMap iatiFTSMap_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiFTSMap" + ADD CONSTRAINT "iatiFTSMap_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiFTSMatch iatiFTSMatch_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public."iatiFTSMatch" + ADD CONSTRAINT "iatiFTSMatch_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiHumanitarianScope iatiHumanitarianScope_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiHumanitarianScope" + ADD CONSTRAINT "iatiHumanitarianScope_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiParticipatingOrg iatiParticipatingOrg_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiParticipatingOrg" + ADD CONSTRAINT "iatiParticipatingOrg_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiPublisher iatiPublisher_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiPublisher" + ADD CONSTRAINT "iatiPublisher_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiRecipientCountry iatiRecipientCountry_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiRecipientCountry" + ADD CONSTRAINT "iatiRecipientCountry_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiSector iatiSector_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiSector" + ADD CONSTRAINT "iatiSector_pkey" PRIMARY KEY (id); + + +-- +-- Name: iatiTransaction iatiTransaction_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiTransaction" + ADD CONSTRAINT "iatiTransaction_pkey" PRIMARY KEY (id); + + +-- +-- Name: icon icon_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.icon + ADD CONSTRAINT icon_pkey PRIMARY KEY (id); + + +-- +-- Name: jobAssociation jobAssociation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."jobAssociation" + ADD CONSTRAINT "jobAssociation_pkey" PRIMARY KEY ("jobId", "objectId", "objectType"); + + +-- +-- Name: job job_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.job + ADD CONSTRAINT job_pkey PRIMARY KEY (id); + + +-- +-- Name: legacy legacy_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.legacy + ADD CONSTRAINT legacy_pkey PRIMARY KEY ("objectType", "objectID", "legacyID"); + + +-- +-- Name: location location_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.location + ADD CONSTRAINT location_pkey PRIMARY KEY (id); + + +-- +-- Name: measurementVersion measurementVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."measurementVersion" + ADD CONSTRAINT "measurementVersion_pkey" PRIMARY KEY (id); + + +-- +-- Name: measurement measurement_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.measurement + ADD CONSTRAINT measurement_pkey PRIMARY KEY (id); + + +-- +-- Name: objectExclude objectExclude_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."objectExclude" + ADD CONSTRAINT "objectExclude_pkey" PRIMARY KEY ("objectType", "objectID", module); + + +-- +-- Name: operationClusterVersion operationClusterVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationClusterVersion" + ADD CONSTRAINT "operationClusterVersion_pkey" PRIMARY KEY (root, version); + + +-- +-- Name: operationCluster operationCluster_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationCluster" + ADD CONSTRAINT "operationCluster_pkey" PRIMARY KEY (id); + + +-- +-- Name: operationVersion operationVersion_pkey; Type: CONSTRAINT; Schema: 
public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationVersion" + ADD CONSTRAINT "operationVersion_pkey" PRIMARY KEY (root, version); + + +-- +-- Name: operation operation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.operation + ADD CONSTRAINT operation_pkey PRIMARY KEY (id); + + +-- +-- Name: organizationLocation organizationLocation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."organizationLocation" + ADD CONSTRAINT "organizationLocation_pkey" PRIMARY KEY ("organizationId", "locationId"); + + +-- +-- Name: organization organization_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.organization + ADD CONSTRAINT organization_name_key UNIQUE (name); + + +-- +-- Name: organization organization_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.organization + ADD CONSTRAINT organization_pkey PRIMARY KEY (id); + + +-- +-- Name: parameterValueIndicatorGoal parameterValueIndicatorGoal_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."parameterValueIndicatorGoal" + ADD CONSTRAINT "parameterValueIndicatorGoal_pkey" PRIMARY KEY ("indicatorGoalId", "parameterValueId"); + + +-- +-- Name: participantCountry participantCountry_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantCountry" + ADD CONSTRAINT "participantCountry_pkey" PRIMARY KEY (id); + + +-- +-- Name: participantOrganization participantOrganization_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantOrganization" + ADD CONSTRAINT "participantOrganization_pkey" PRIMARY KEY (id); + + +-- +-- Name: participantRole participantRole_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantRole" + ADD CONSTRAINT "participantRole_pkey" PRIMARY KEY (id); + + +-- +-- Name: participant participant_hidId_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.participant + ADD CONSTRAINT "participant_hidId_key" UNIQUE ("hidId"); + + +-- +-- Name: participant participant_internalUse_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.participant + ADD CONSTRAINT "participant_internalUse_key" UNIQUE ("internalUse"); + + +-- +-- Name: participant participant_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.participant + ADD CONSTRAINT participant_pkey PRIMARY KEY (id); + + +-- +-- Name: permittedAction permittedAction_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."permittedAction" + ADD CONSTRAINT "permittedAction_pkey" PRIMARY KEY (id); + + +-- +-- Name: blueprint planBlueprint_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blueprint + ADD CONSTRAINT "planBlueprint_pkey" PRIMARY KEY (id); + + +-- +-- Name: planEmergency planEmergency_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEmergency" + ADD CONSTRAINT "planEmergency_pkey" PRIMARY KEY ("planId", "emergencyId"); + + +-- +-- Name: planEntityVersion planEntityVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEntityVersion" + ADD CONSTRAINT "planEntityVersion_pkey" PRIMARY KEY (id); + + +-- +-- Name: planEntity planEntity_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER 
TABLE ONLY public."planEntity" + ADD CONSTRAINT "planEntity_pkey" PRIMARY KEY (id); + + +-- +-- Name: planLocation planLocation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planLocation" + ADD CONSTRAINT "planLocation_pkey" PRIMARY KEY (id); + + +-- +-- Name: planReportingPeriod planReportingPeriod_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planReportingPeriod" + ADD CONSTRAINT "planReportingPeriod_pkey" PRIMARY KEY (id); + + +-- +-- Name: planTag planTag_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planTag" + ADD CONSTRAINT "planTag_pkey" PRIMARY KEY (id); + + +-- +-- Name: planVersion planVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planVersion" + ADD CONSTRAINT "planVersion_pkey" PRIMARY KEY (id); + + +-- +-- Name: planYear planYear_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planYear" + ADD CONSTRAINT "planYear_pkey" PRIMARY KEY (id); + + +-- +-- Name: plan plan_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.plan + ADD CONSTRAINT plan_pkey PRIMARY KEY (id); + + +-- +-- Name: procedureEntityPrototype procedureEntityPrototype_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureEntityPrototype" + ADD CONSTRAINT "procedureEntityPrototype_pkey" PRIMARY KEY (id); + + +-- +-- Name: procedureEntityPrototype procedureEntityPrototype_planId_entityPrototypeId_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureEntityPrototype" + ADD CONSTRAINT "procedureEntityPrototype_planId_entityPrototypeId_key" UNIQUE ("planId", "entityPrototypeId"); + + +-- +-- Name: procedureSectionField procedureSectionField_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureSectionField" + ADD CONSTRAINT "procedureSectionField_pkey" PRIMARY KEY ("procedureSectionId", "conditionFieldId"); + + +-- +-- Name: procedureSection procedureSection_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureSection" + ADD CONSTRAINT "procedureSection_pkey" PRIMARY KEY (id); + + +-- +-- Name: projectVersionAttachment projectAttachment_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionAttachment" + ADD CONSTRAINT "projectAttachment_pkey" PRIMARY KEY ("projectVersionId", "attachmentId"); + + +-- +-- Name: projectContact projectContact_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectContact" + ADD CONSTRAINT "projectContact_pkey" PRIMARY KEY (id); + + +-- +-- Name: projectVersionField projectField_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionField" + ADD CONSTRAINT "projectField_pkey" PRIMARY KEY (id); + + +-- +-- Name: projectGlobalClusters projectGlobalClusters_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectGlobalClusters" + ADD CONSTRAINT "projectGlobalClusters_pkey" PRIMARY KEY ("projectVersionId", "globalClusterId"); + + +-- +-- Name: projectVersionGoverningEntity projectGoverningEntities_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionGoverningEntity" + ADD CONSTRAINT "projectGoverningEntities_pkey" PRIMARY KEY ("projectVersionId", 
"governingEntityId"); + + +-- +-- Name: projectLocations projectLocations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectLocations" + ADD CONSTRAINT "projectLocations_pkey" PRIMARY KEY ("projectVersionId", "locationId"); + + +-- +-- Name: projectVersionOrganization projectOrganizations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionOrganization" + ADD CONSTRAINT "projectOrganizations_pkey" PRIMARY KEY ("projectVersionId", "organizationId"); + + +-- +-- Name: projectVersionPlanEntity projectPlanEntities_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlanEntity" + ADD CONSTRAINT "projectPlanEntities_pkey" PRIMARY KEY ("projectVersionId", "planEntityId"); + + +-- +-- Name: projectVersionPlan projectPlans_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlan" + ADD CONSTRAINT "projectPlans_pkey" PRIMARY KEY ("projectVersionId", "planId"); + + +-- +-- Name: projectVersionAttachment projectVersionAttachment_attachmentId_projectVersionId_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionAttachment" + ADD CONSTRAINT "projectVersionAttachment_attachmentId_projectVersionId_key" UNIQUE ("attachmentId", "projectVersionId"); + + +-- +-- Name: projectVersionGoverningEntity projectVersionGoverningEntity_governingEntityId_projectVers_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionGoverningEntity" + ADD CONSTRAINT "projectVersionGoverningEntity_governingEntityId_projectVers_key" UNIQUE ("governingEntityId", "projectVersionId"); + + +-- +-- Name: projectVersionHistory projectVersionHistory_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionHistory" + ADD CONSTRAINT "projectVersionHistory_pkey" PRIMARY KEY (id); + + +-- +-- Name: projectVersionPlanEntity projectVersionPlanEntity_planEntityId_projectVersionId_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlanEntity" + ADD CONSTRAINT "projectVersionPlanEntity_planEntityId_projectVersionId_key" UNIQUE ("planEntityId", "projectVersionId"); + + +-- +-- Name: projectVersionPlan projectVersionPlan_unique_id; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlan" + ADD CONSTRAINT "projectVersionPlan_unique_id" UNIQUE (id); + + +-- +-- Name: projectVersion projectVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersion" + ADD CONSTRAINT "projectVersion_pkey" PRIMARY KEY (id); + + +-- +-- Name: project project_code_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.project + ADD CONSTRAINT project_code_key UNIQUE (code); + + +-- +-- Name: project project_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.project + ADD CONSTRAINT project_pkey PRIMARY KEY (id); + + +-- +-- Name: reportDetail reportDetail_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportDetail" + ADD CONSTRAINT "reportDetail_pkey" PRIMARY KEY (id); + + +-- +-- Name: reportFile reportFile_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportFile" + ADD CONSTRAINT "reportFile_pkey" PRIMARY KEY (id); + + +-- +-- Name: 
reportingWindowAssignmentVersion reportingWindowAssignmentVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowAssignmentVersion" + ADD CONSTRAINT "reportingWindowAssignmentVersion_pkey" PRIMARY KEY (root, version); + + +-- +-- Name: reportingWindowAssignment reportingWindowAssignment_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowAssignment" + ADD CONSTRAINT "reportingWindowAssignment_pkey" PRIMARY KEY (id); + + +-- +-- Name: reportingWindowVersion reportingWindowVersion_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowVersion" + ADD CONSTRAINT "reportingWindowVersion_pkey" PRIMARY KEY (root, version); + + +-- +-- Name: reportingWindow reportingwindow_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindow" + ADD CONSTRAINT reportingwindow_pkey PRIMARY KEY (id); + + +-- +-- Name: roleAuthenticationKey roleAuthenticationKey_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."roleAuthenticationKey" + ADD CONSTRAINT "roleAuthenticationKey_pkey" PRIMARY KEY (id); + + +-- +-- Name: rolePermittedAction rolePermittedAction_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."rolePermittedAction" + ADD CONSTRAINT "rolePermittedAction_pkey" PRIMARY KEY (id); + + +-- +-- Name: role role_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.role + ADD CONSTRAINT role_pkey PRIMARY KEY (id); + + +-- +-- Name: tag tag_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tag + ADD CONSTRAINT tag_name_key UNIQUE (name); + + +-- +-- Name: tag tag_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tag + ADD CONSTRAINT tag_pkey PRIMARY KEY (id); + + +-- +-- Name: task task_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.task + ADD CONSTRAINT task_pkey PRIMARY KEY (id); + + +-- +-- Name: unitType unitType_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."unitType" + ADD CONSTRAINT "unitType_pkey" PRIMARY KEY (id); + + +-- +-- Name: unit unit_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.unit + ADD CONSTRAINT unit_pkey PRIMARY KEY (id); + + +-- +-- Name: usageYear usageYear_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."usageYear" + ADD CONSTRAINT "usageYear_pkey" PRIMARY KEY (id); + + +-- +-- Name: usageYear usageYear_year_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."usageYear" + ADD CONSTRAINT "usageYear_year_key" UNIQUE (year); + + +-- +-- Name: workflowRole workflowRole_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowRole" + ADD CONSTRAINT "workflowRole_pkey" PRIMARY KEY (id); + + +-- +-- Name: workflowStatusOptionStep workflowStatusOptionStep_fromId_toId_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOptionStep" + ADD CONSTRAINT "workflowStatusOptionStep_fromId_toId_key" UNIQUE ("fromId", "toId"); + + +-- +-- Name: workflowStatusOptionStep workflowStatusOptionStep_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOptionStep" + ADD CONSTRAINT 
"workflowStatusOptionStep_pkey" PRIMARY KEY (id); + + +-- +-- Name: workflowStatusOption workflowStatusOption_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOption" + ADD CONSTRAINT "workflowStatusOption_pkey" PRIMARY KEY (id); + + +-- +-- Name: attachmentPrototype_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachmentPrototype_plan_index" ON public."attachmentPrototype" USING btree ("planId"); + + +-- +-- Name: attachmentPrototype_refCode_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachmentPrototype_refCode_index" ON public."attachmentPrototype" USING btree ("refCode"); + + +-- +-- Name: attachmentVersion_attachmentId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachmentVersion_attachmentId_idx" ON public."attachmentVersion" USING btree ("attachmentId"); + + +-- +-- Name: attachmentVersion_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachmentVersion_current_index" ON public."attachmentVersion" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: attachmentVersion_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachmentVersion_latestTagged_index" ON public."attachmentVersion" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: attachmentVersion_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachmentVersion_latest_index" ON public."attachmentVersion" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: attachmentVersion_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachmentVersion_versionTags_index" ON public."attachmentVersion" USING gin ("versionTags"); + + +-- +-- Name: attachment_attachmentPrototypeId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachment_attachmentPrototypeId_idx" ON public.attachment USING btree ("attachmentPrototypeId"); + + +-- +-- Name: attachment_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX attachment_current_index ON public.attachment USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: attachment_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachment_latestTagged_index" ON public.attachment USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: attachment_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX attachment_latest_index ON public.attachment USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: attachment_object_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX attachment_object_index ON public.attachment USING btree ("objectId", "objectType"); + + +-- +-- Name: attachment_planId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachment_planId_idx" ON public.attachment USING btree ("planId"); + + +-- +-- Name: attachment_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "attachment_versionTags_index" ON public.attachment USING gin ("versionTags"); + + +-- +-- Name: auth_grant_grantee; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX auth_grant_grantee ON public."authGrant" USING btree (grantee); + + +-- +-- Name: auth_grant_log_grantee; Type: INDEX; 
Schema: public; Owner: postgres +-- + +CREATE INDEX auth_grant_log_grantee ON public."authGrantLog" USING btree (grantee); + + +-- +-- Name: auth_grant_log_target; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX auth_grant_log_target ON public."authGrantLog" USING btree (target); + + +-- +-- Name: auth_grant_target; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX auth_grant_target ON public."authGrant" USING btree (target); + + +-- +-- Name: auth_grantee_type_grantee_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX auth_grantee_type_grantee_id ON public."authGrantee" USING btree (type, "granteeId"); + + +-- +-- Name: auth_invite_email; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX auth_invite_email ON public."authInvite" USING btree (email); + + +-- +-- Name: auth_invite_target; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX auth_invite_target ON public."authInvite" USING btree (target); + + +-- +-- Name: auth_target_type_target_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX auth_target_type_target_id ON public."authTarget" USING btree (type, "targetId"); + + +-- +-- Name: budgetSegmentBreakdownEntity_budgetSegmentBreakdownId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "budgetSegmentBreakdownEntity_budgetSegmentBreakdownId_idx" ON public."budgetSegmentBreakdownEntity" USING btree ("budgetSegmentBreakdownId"); + + +-- +-- Name: budgetSegmentBreakdownEntity_budgetSegmentBreakdownId_objec_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "budgetSegmentBreakdownEntity_budgetSegmentBreakdownId_objec_idx" ON public."budgetSegmentBreakdownEntity" USING btree ("budgetSegmentBreakdownId", "objectType"); + + +-- +-- Name: budgetSegmentBreakdown_budgetSegmentId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "budgetSegmentBreakdown_budgetSegmentId_idx" ON public."budgetSegmentBreakdown" USING btree ("budgetSegmentId"); + + +-- +-- Name: budgetSegment_projectVersionId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "budgetSegment_projectVersionId_idx" ON public."budgetSegment" USING btree ("projectVersionId"); + + +-- +-- Name: cache_namespace_fingerprint; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX cache_namespace_fingerprint ON public.cache USING btree (namespace, fingerprint); + + +-- +-- Name: categoryLegacy_group_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "categoryLegacy_group_idx" ON public."categoryLegacy" USING btree ("group"); + + +-- +-- Name: categoryLegacy_pkey; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "categoryLegacy_pkey" ON public."categoryLegacy" USING btree (id, "group", "legacyID"); + + +-- +-- Name: categoryRef_categoryID_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "categoryRef_categoryID_idx" ON public."categoryRef" USING btree ("categoryID"); + + +-- +-- Name: category_group_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX category_group_idx ON public.category USING btree ("group"); + + +-- +-- Name: category_name_group; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX category_name_group ON public.category USING btree (name, "group"); + + +-- +-- Name: category_parent_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX category_parent_index ON public.category USING btree ("parentID"); + + +-- +-- Name: 
childID; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "childID" ON public."flowLink" USING btree ("childID"); + + +-- +-- Name: client_id_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX client_id_index ON public.client USING btree ("clientId"); + + +-- +-- Name: conditionFieldReliesOn_reliesOnId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "conditionFieldReliesOn_reliesOnId_idx" ON public."conditionFieldReliesOn" USING btree ("reliesOnId"); + + +-- +-- Name: conditionField_fieldType_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "conditionField_fieldType_idx" ON public."conditionField" USING btree ("fieldType"); + + +-- +-- Name: conditionField_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "conditionField_plan_index" ON public."conditionField" USING btree ("planId"); + + +-- +-- Name: conditionfield_lowercase_name_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX conditionfield_lowercase_name_index ON public."conditionField" USING btree (lower((name)::text)); + + +-- +-- Name: disaggregationCategoryGroup_planId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "disaggregationCategoryGroup_planId_idx" ON public."disaggregationCategoryGroup" USING btree ("planId"); + + +-- +-- Name: disaggregationCategoryGroup_unitTypeId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "disaggregationCategoryGroup_unitTypeId_idx" ON public."disaggregationCategoryGroup" USING btree ("unitTypeId"); + + +-- +-- Name: disaggregationCategory_disaggregationCategoryGroupId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "disaggregationCategory_disaggregationCategoryGroupId_idx" ON public."disaggregationCategory" USING btree ("disaggregationCategoryGroupId"); + + +-- +-- Name: disaggregationModel_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "disaggregationModel_plan_index" ON public."disaggregationModel" USING btree ("planId"); + + +-- +-- Name: emergencyLocation_locationId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "emergencyLocation_locationId_idx" ON public."emergencyLocation" USING btree ("locationId"); + + +-- +-- Name: emergency_name_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX emergency_name_index ON public.emergency USING btree (name); + + +-- +-- Name: emergency_status_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX emergency_status_index ON public.emergency USING btree (active DESC NULLS LAST, restricted NULLS FIRST); + + +-- +-- Name: endpointLog_entity_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "endpointLog_entity_index" ON public."endpointLog" USING btree ("entityId", "entityType"); + + +-- +-- Name: endpointLog_participantId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "endpointLog_participantId_idx" ON public."endpointLog" USING btree ("participantId"); + + +-- +-- Name: entitiesAssociation_child_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "entitiesAssociation_child_index" ON public."entitiesAssociation" USING btree ("childId", "childType"); + + +-- +-- Name: entitiesAssociation_parent_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "entitiesAssociation_parent_index" ON public."entitiesAssociation" USING btree ("parentId", "parentType"); + + +-- +-- Name: entityCategories_plan_index; 
Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "entityCategories_plan_index" ON public."entityCategories" USING btree ("planId"); + + +-- +-- Name: entityCategory_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "entityCategory_plan_index" ON public."entityCategory" USING btree ("planId"); + + +-- +-- Name: entityPrototype_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "entityPrototype_plan_index" ON public."entityPrototype" USING btree ("planId"); + + +-- +-- Name: entityPrototype_refCode_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "entityPrototype_refCode_index" ON public."entityPrototype" USING btree ("refCode"); + + +-- +-- Name: externalData_flow_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "externalData_flow_index" ON public."externalData" USING btree ("flowID", "versionID"); + + +-- +-- Name: externalData_system_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "externalData_system_index" ON public."externalData" USING btree ("systemID"); + + +-- +-- Name: externalReference_flow_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "externalReference_flow_index" ON public."externalReference" USING btree ("flowID", "versionID"); + + +-- +-- Name: externalReference_system_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "externalReference_system_index" ON public."externalReference" USING btree ("systemID"); + + +-- +-- Name: file_record_hash; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX file_record_hash ON public."fileRecord" USING btree (hash); + + +-- +-- Name: file_record_namespace; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX file_record_namespace ON public."fileRecord" USING btree (namespace); + + +-- +-- Name: flowObject_directional_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "flowObject_directional_index" ON public."flowObject" USING btree ("flowID", "objectID", "objectType", "refDirection"); + + +-- +-- Name: flowObject_objectID_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "flowObject_objectID_idx" ON public."flowObject" USING btree ("objectID"); + + +-- +-- Name: flowObject_objectType_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "flowObject_objectType_index" ON public."flowObject" USING btree ("objectType"); + + +-- +-- Name: flowObject_referential_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "flowObject_referential_index" ON public."flowObject" USING btree ("flowID", "versionID", "objectType"); + + +-- +-- Name: flow_object_flow_i_d_object_i_d_object_type_ref_direction; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX flow_object_flow_i_d_object_i_d_object_type_ref_direction ON public."flowObject" USING btree ("flowID", "objectID", "objectType", "refDirection"); + + +-- +-- Name: flow_referential_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX flow_referential_index ON public.flow USING btree (id, "versionID"); + + +-- +-- Name: flow_status_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX flow_status_index ON public.flow USING btree ("deletedAt" NULLS FIRST, "activeStatus" DESC NULLS LAST, restricted NULLS FIRST); + + +-- +-- Name: flow_updated_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX flow_updated_index ON public.flow USING btree ("updatedAt"); + + 
+-- +-- Name: formLatestVersions; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "formLatestVersions" ON public."formVersion" USING btree ("isLatest", root) WHERE ("isLatest" = true); + + +-- +-- Name: globalClusterAssociation_globalCluster_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "globalClusterAssociation_globalCluster_index" ON public."globalClusterAssociation" USING btree ("globalClusterId"); + + +-- +-- Name: globalClusterAssociation_governingEntity_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "globalClusterAssociation_governingEntity_index" ON public."globalClusterAssociation" USING btree ("governingEntityId"); + + +-- +-- Name: globalCluster_code_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "globalCluster_code_index" ON public."globalCluster" USING btree (code); + + +-- +-- Name: globalCluster_name_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "globalCluster_name_index" ON public."globalCluster" USING btree (name); + + +-- +-- Name: globalCluster_parent_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "globalCluster_parent_index" ON public."globalCluster" USING btree ("parentId"); + + +-- +-- Name: governingEntityVersion_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntityVersion_current_index" ON public."governingEntityVersion" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: governingEntityVersion_governingEntityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntityVersion_governingEntityId_idx" ON public."governingEntityVersion" USING btree ("governingEntityId"); + + +-- +-- Name: governingEntityVersion_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntityVersion_latestTagged_index" ON public."governingEntityVersion" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: governingEntityVersion_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntityVersion_latest_index" ON public."governingEntityVersion" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: governingEntityVersion_updated_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntityVersion_updated_index" ON public."governingEntityVersion" USING btree ("updatedAt"); + + +-- +-- Name: governingEntityVersion_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntityVersion_versionTags_index" ON public."governingEntityVersion" USING gin ("versionTags"); + + +-- +-- Name: governingEntity_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntity_current_index" ON public."governingEntity" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: governingEntity_entityPrototypeId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntity_entityPrototypeId_idx" ON public."governingEntity" USING btree ("entityPrototypeId"); + + +-- +-- Name: governingEntity_entityType_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntity_entityType_index" ON public."governingEntity" USING btree (((public."entityType"("governingEntity".*))::text)); + + +-- +-- Name: governingEntity_latestTagged_index; Type: INDEX; Schema: public; Owner: 
postgres +-- + +CREATE INDEX "governingEntity_latestTagged_index" ON public."governingEntity" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: governingEntity_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntity_latest_index" ON public."governingEntity" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: governingEntity_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntity_plan_index" ON public."governingEntity" USING btree ("planId"); + + +-- +-- Name: governingEntity_updated_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntity_updated_index" ON public."governingEntity" USING btree ("updatedAt"); + + +-- +-- Name: governingEntity_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "governingEntity_versionTags_index" ON public."governingEntity" USING gin ("versionTags"); + + +-- +-- Name: iatiActivity_iatiPublisherId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiActivity_iatiPublisherId_idx" ON public."iatiActivity" USING btree ("iatiPublisherId"); + + +-- +-- Name: iatiFTSMap_iatiActivityID_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiFTSMap_iatiActivityID_idx" ON public."iatiFTSMap" USING btree ("iatiActivityID"); + + +-- +-- Name: iatiFTSMap_iatiPublisherID_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiFTSMap_iatiPublisherID_idx" ON public."iatiFTSMap" USING btree ("iatiPublisherID"); + + +-- +-- Name: iatiFTSMatch_iatiFTSMapID_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiFTSMatch_iatiFTSMapID_idx" ON public."iatiFTSMatch" USING btree ("iatiFTSMapID"); + + +-- +-- Name: iatiHumanitarianScope_iatiActivityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiHumanitarianScope_iatiActivityId_idx" ON public."iatiHumanitarianScope" USING btree ("iatiActivityId"); + + +-- +-- Name: iatiParticipatingOrg_iatiActivityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiParticipatingOrg_iatiActivityId_idx" ON public."iatiParticipatingOrg" USING btree ("iatiActivityId"); + + +-- +-- Name: iatiRecipientCountry_iatiActivityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiRecipientCountry_iatiActivityId_idx" ON public."iatiRecipientCountry" USING btree ("iatiActivityId"); + + +-- +-- Name: iatiSector_iatiActivityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiSector_iatiActivityId_idx" ON public."iatiSector" USING btree ("iatiActivityId"); + + +-- +-- Name: iatiTransaction_iatiActivityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiTransaction_iatiActivityId_idx" ON public."iatiTransaction" USING btree ("iatiActivityId"); + + +-- +-- Name: iatiTransaction_iatiFTSMapId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "iatiTransaction_iatiFTSMapId_idx" ON public."iatiTransaction" USING btree ("iatiFTSMapId"); + + +-- +-- Name: location_iso3_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX location_iso3_index ON public.location USING btree (iso3); + + +-- +-- Name: location_name_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX location_name_index ON public.location USING btree (name); + + +-- +-- Name: location_parentId_idx; Type: INDEX; Schema: public; Owner: postgres 
+-- + +CREATE INDEX "location_parentId_idx" ON public.location USING btree ("parentId"); + + +-- +-- Name: measurementVersion_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurementVersion_current_index" ON public."measurementVersion" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: measurementVersion_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurementVersion_latestTagged_index" ON public."measurementVersion" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: measurementVersion_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurementVersion_latest_index" ON public."measurementVersion" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: measurementVersion_measurementId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurementVersion_measurementId_idx" ON public."measurementVersion" USING btree ("measurementId"); + + +-- +-- Name: measurementVersion_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurementVersion_versionTags_index" ON public."measurementVersion" USING gin ("versionTags"); + + +-- +-- Name: measurement_attachment_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX measurement_attachment_index ON public.measurement USING btree ("attachmentId"); + + +-- +-- Name: measurement_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX measurement_current_index ON public.measurement USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: measurement_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurement_latestTagged_index" ON public.measurement USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: measurement_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX measurement_latest_index ON public.measurement USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: measurement_planReportingPeriod_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurement_planReportingPeriod_index" ON public.measurement USING btree ("planReportingPeriodId"); + + +-- +-- Name: measurement_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "measurement_versionTags_index" ON public.measurement USING gin ("versionTags"); + + +-- +-- Name: operationClusterLatestVersions; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "operationClusterLatestVersions" ON public."operationClusterVersion" USING btree ("isLatest", root) WHERE ("isLatest" = true); + + +-- +-- Name: operationLatestVersions; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "operationLatestVersions" ON public."operationVersion" USING btree ("isLatest", root) WHERE ("isLatest" = true); + + +-- +-- Name: organizationLocation_locationId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "organizationLocation_locationId_idx" ON public."organizationLocation" USING btree ("locationId"); + + +-- +-- Name: organization_abbreviation_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX organization_abbreviation_index ON public.organization USING btree (abbreviation); + + +-- +-- Name: organization_name_index; Type: INDEX; Schema: public; 
Owner: postgres +-- + +CREATE INDEX organization_name_index ON public.organization USING btree (name); + + +-- +-- Name: organization_nativeName_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "organization_nativeName_index" ON public.organization USING btree ("nativeName"); + + +-- +-- Name: organization_newOrganizationId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "organization_newOrganizationId_idx" ON public.organization USING btree ("newOrganizationId"); + + +-- +-- Name: organization_parent_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX organization_parent_index ON public.organization USING btree ("parentID"); + + +-- +-- Name: parentID; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "parentID" ON public."flowLink" USING btree ("parentID"); + + +-- +-- Name: participantCountry_location_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantCountry_location_index" ON public."participantCountry" USING btree ("locationId"); + + +-- +-- Name: participantCountry_participant_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantCountry_participant_index" ON public."participantCountry" USING btree ("participantId"); + + +-- +-- Name: participantOrganization_organization_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantOrganization_organization_index" ON public."participantOrganization" USING btree ("organizationId"); + + +-- +-- Name: participantOrganization_participant_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantOrganization_participant_index" ON public."participantOrganization" USING btree ("participantId"); + + +-- +-- Name: participantRole_object_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantRole_object_index" ON public."participantRole" USING btree ("objectId", "objectType"); + + +-- +-- Name: participantRole_participant_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantRole_participant_index" ON public."participantRole" USING btree ("participantId"); + + +-- +-- Name: participantRole_permittedAction_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantRole_permittedAction_index" ON public."rolePermittedAction" USING btree ("permittedActionId"); + + +-- +-- Name: participantRole_role_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantRole_role_index" ON public."participantRole" USING btree ("roleId"); + + +-- +-- Name: participantRole_role_index2; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "participantRole_role_index2" ON public."rolePermittedAction" USING btree ("roleId"); + + +-- +-- Name: participant_hidSub_key; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "participant_hidSub_key" ON public.participant USING btree ("hidSub"); + + +-- +-- Name: planEmergency_emergencyId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEmergency_emergencyId_idx" ON public."planEmergency" USING btree ("emergencyId"); + + +-- +-- Name: planEntityVersion_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntityVersion_current_index" ON public."planEntityVersion" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: planEntityVersion_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE 
INDEX "planEntityVersion_latestTagged_index" ON public."planEntityVersion" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: planEntityVersion_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntityVersion_latest_index" ON public."planEntityVersion" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: planEntityVersion_planEntityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntityVersion_planEntityId_idx" ON public."planEntityVersion" USING btree ("planEntityId"); + + +-- +-- Name: planEntityVersion_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntityVersion_versionTags_index" ON public."planEntityVersion" USING gin ("versionTags"); + + +-- +-- Name: planEntity_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntity_current_index" ON public."planEntity" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: planEntity_entityPrototypeId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntity_entityPrototypeId_idx" ON public."planEntity" USING btree ("entityPrototypeId"); + + +-- +-- Name: planEntity_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntity_latestTagged_index" ON public."planEntity" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: planEntity_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntity_latest_index" ON public."planEntity" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: planEntity_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntity_plan_index" ON public."planEntity" USING btree ("planId"); + + +-- +-- Name: planEntity_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planEntity_versionTags_index" ON public."planEntity" USING gin ("versionTags"); + + +-- +-- Name: planLocation_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planLocation_current_index" ON public."planLocation" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: planLocation_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planLocation_latestTagged_index" ON public."planLocation" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: planLocation_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planLocation_latest_index" ON public."planLocation" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: planLocation_location_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planLocation_location_index" ON public."planLocation" USING btree ("locationId"); + + +-- +-- Name: planLocation_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planLocation_plan_index" ON public."planLocation" USING btree ("planId"); + + +-- +-- Name: planLocation_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planLocation_versionTags_index" ON public."planLocation" USING gin ("versionTags"); + + +-- +-- Name: planReportingPeriod_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planReportingPeriod_plan_index" ON public."planReportingPeriod" USING 
btree ("planId"); + + +-- +-- Name: planTag_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planTag_plan_index" ON public."planTag" USING btree ("planId"); + + +-- +-- Name: planTag_public_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planTag_public_index" ON public."planTag" USING btree (public); + + +-- +-- Name: planVersion_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planVersion_current_index" ON public."planVersion" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: planVersion_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planVersion_latestTagged_index" ON public."planVersion" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: planVersion_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planVersion_latest_index" ON public."planVersion" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: planVersion_name_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planVersion_name_index" ON public."planVersion" USING btree (name); + + +-- +-- Name: planVersion_planReportingPeriod_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planVersion_planReportingPeriod_index" ON public."planVersion" USING btree ("currentReportingPeriodId"); + + +-- +-- Name: planVersion_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planVersion_plan_index" ON public."planVersion" USING btree ("planId"); + + +-- +-- Name: planVersion_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planVersion_versionTags_index" ON public."planVersion" USING gin ("versionTags"); + + +-- +-- Name: planYear_current_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planYear_current_index" ON public."planYear" USING btree ("currentVersion") WHERE ("currentVersion" = true); + + +-- +-- Name: planYear_latestTagged_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planYear_latestTagged_index" ON public."planYear" USING btree ("latestTaggedVersion") WHERE ("latestTaggedVersion" = true); + + +-- +-- Name: planYear_latest_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planYear_latest_index" ON public."planYear" USING btree ("latestVersion") WHERE ("latestVersion" = true); + + +-- +-- Name: planYear_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planYear_plan_index" ON public."planYear" USING btree ("planId"); + + +-- +-- Name: planYear_usageYear_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planYear_usageYear_index" ON public."planYear" USING btree ("usageYearId"); + + +-- +-- Name: planYear_versionTags_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "planYear_versionTags_index" ON public."planYear" USING gin ("versionTags"); + + +-- +-- Name: procedureEntityPrototype_entityPrototype_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "procedureEntityPrototype_entityPrototype_index" ON public."procedureEntityPrototype" USING btree ("entityPrototypeId"); + + +-- +-- Name: procedureEntityPrototype_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "procedureEntityPrototype_plan_index" ON public."procedureEntityPrototype" USING btree ("planId"); + + +-- +-- Name: 
procedureSectionField_conditionField_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "procedureSectionField_conditionField_index" ON public."procedureSectionField" USING btree ("conditionFieldId"); + + +-- +-- Name: procedureSectionField_procedureSection_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "procedureSectionField_procedureSection_index" ON public."procedureSectionField" USING btree ("procedureSectionId"); + + +-- +-- Name: procedureSection_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "procedureSection_plan_index" ON public."procedureSection" USING btree ("planId"); + + +-- +-- Name: projectAttachment_attachment_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectAttachment_attachment_index" ON public."projectVersionAttachment" USING btree ("attachmentId"); + + +-- +-- Name: projectAttachment_projectVersion_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectAttachment_projectVersion_index" ON public."projectVersionAttachment" USING btree ("projectVersionId"); + + +-- +-- Name: projectContact_participantId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectContact_participantId_idx" ON public."projectContact" USING btree ("participantId"); + + +-- +-- Name: projectContact_projectVersionId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectContact_projectVersionId_index" ON public."projectContact" USING btree ("projectVersionId"); + + +-- +-- Name: projectField_value_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectField_value_index" ON public."projectVersionField" USING gin (to_tsvector('english'::regconfig, value)); + + +-- +-- Name: projectGlobalClusters_globalClusterId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectGlobalClusters_globalClusterId_index" ON public."projectGlobalClusters" USING btree ("globalClusterId"); + + +-- +-- Name: projectGlobalClusters_projectVersionId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectGlobalClusters_projectVersionId_index" ON public."projectGlobalClusters" USING btree ("projectVersionId"); + + +-- +-- Name: projectGoverningEntities_governingEntityId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectGoverningEntities_governingEntityId_index" ON public."projectVersionGoverningEntity" USING btree ("governingEntityId"); + + +-- +-- Name: projectGoverningEntities_projectVersionId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectGoverningEntities_projectVersionId_index" ON public."projectVersionGoverningEntity" USING btree ("projectVersionId"); + + +-- +-- Name: projectLocations_locationId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectLocations_locationId_index" ON public."projectLocations" USING btree ("locationId"); + + +-- +-- Name: projectLocations_projectVersionId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectLocations_projectVersionId_index" ON public."projectLocations" USING btree ("projectVersionId"); + + +-- +-- Name: projectOrganizations_organizationId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectOrganizations_organizationId_index" ON public."projectVersionOrganization" USING btree ("organizationId"); + + +-- +-- Name: projectOrganizations_projectVersionId_index; Type: INDEX; Schema: public; 
Owner: postgres +-- + +CREATE INDEX "projectOrganizations_projectVersionId_index" ON public."projectVersionOrganization" USING btree ("projectVersionId"); + + +-- +-- Name: projectPlans_planId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectPlans_planId_index" ON public."projectVersionPlan" USING btree ("planId"); + + +-- +-- Name: projectPlans_projectVersionId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectPlans_projectVersionId_index" ON public."projectVersionPlan" USING btree ("projectVersionId"); + + +-- +-- Name: projectPlans_projectVersionId_planId_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectPlans_projectVersionId_planId_index" ON public."projectVersionPlan" USING btree ("planId", "projectVersionId"); + + +-- +-- Name: projectVersionAttachment_attachmentVersionId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionAttachment_attachmentVersionId_idx" ON public."projectVersionAttachment" USING btree ("attachmentVersionId"); + + +-- +-- Name: projectVersionComment_participantId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionComment_participantId_idx" ON public."projectVersionComment" USING btree ("participantId"); + + +-- +-- Name: projectVersionComment_projectVersionPlanId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionComment_projectVersionPlanId_idx" ON public."projectVersionComment" USING btree ("projectVersionPlanId"); + + +-- +-- Name: projectVersionGoverningEntity_governingEntityVersionId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionGoverningEntity_governingEntityVersionId_idx" ON public."projectVersionGoverningEntity" USING btree ("governingEntityVersionId"); + + +-- +-- Name: projectVersionPlanEntity_planEntityId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionPlanEntity_planEntityId_idx" ON public."projectVersionPlanEntity" USING btree ("planEntityId"); + + +-- +-- Name: projectVersionPlanEntity_planEntityVersionId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionPlanEntity_planEntityVersionId_idx" ON public."projectVersionPlanEntity" USING btree ("planEntityVersionId"); + + +-- +-- Name: projectVersionPlan_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionPlan_id_idx" ON public."projectVersionPlan" USING btree (id); + + +-- +-- Name: projectVersionPlan_workflowStatusOptionId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersionPlan_workflowStatusOptionId_idx" ON public."projectVersionPlan" USING btree ("workflowStatusOptionId"); + + +-- +-- Name: projectVersion_startDate_endDate_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "projectVersion_startDate_endDate_index" ON public."projectVersion" USING btree ("startDate", "endDate"); + + +-- +-- Name: projectVersion_version_projectId_key; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "projectVersion_version_projectId_key" ON public."projectVersion" USING btree (version, "projectId"); + + +-- +-- Name: project_code_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX project_code_index ON public.project USING btree (code); + + +-- +-- Name: project_creatorParticipant_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "project_creatorParticipant_index" ON 
public.project USING btree ("creatorParticipantId"); + + +-- +-- Name: project_currentPublishedVersion_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "project_currentPublishedVersion_index" ON public.project USING btree ("currentPublishedVersionId"); + + +-- +-- Name: project_latestVersion_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "project_latestVersion_index" ON public.project USING btree ("latestVersionId"); + + +-- +-- Name: projectversion_name_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX projectversion_name_index ON public."projectVersion" USING btree (name); + + +-- +-- Name: projectversion_project_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX projectversion_project_index ON public."projectVersion" USING btree ("projectId"); + + +-- +-- Name: projectversionfield_conditionfieldid_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX projectversionfield_conditionfieldid_idx ON public."projectVersionField" USING btree ("conditionFieldId"); + + +-- +-- Name: projectversionfield_projectversionplanid_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX projectversionfield_projectversionplanid_idx ON public."projectVersionField" USING btree ("projectVersionPlanId"); + + +-- +-- Name: reportDetail_flow_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "reportDetail_flow_index" ON public."reportDetail" USING btree ("flowID", "versionID"); + + +-- +-- Name: reportDetail_organizationID_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "reportDetail_organizationID_idx" ON public."reportDetail" USING btree ("organizationID"); + + +-- +-- Name: reportFile_fileAsset_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "reportFile_fileAsset_index" ON public."reportFile" USING btree ("fileAssetID"); + + +-- +-- Name: reportFile_report_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "reportFile_report_index" ON public."reportFile" USING btree ("reportID"); + + +-- +-- Name: reportingWindowAssignmentLatestVersions; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "reportingWindowAssignmentLatestVersions" ON public."reportingWindowAssignmentVersion" USING btree ("isLatest", root) WHERE ("isLatest" = true); + + +-- +-- Name: reportingWindowLatestVersions; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX "reportingWindowLatestVersions" ON public."reportingWindowVersion" USING btree ("isLatest", root) WHERE ("isLatest" = true); + + +-- +-- Name: roleAuthenticationKey_roleId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "roleAuthenticationKey_roleId_idx" ON public."roleAuthenticationKey" USING btree ("roleId"); + + +-- +-- Name: role_name_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX role_name_index ON public.role USING btree (name); + + +-- +-- Name: unit_unitTypeId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "unit_unitTypeId_idx" ON public.unit USING btree ("unitTypeId"); + + +-- +-- Name: upper_case_category_name; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX upper_case_category_name ON public.category USING btree (lower((name)::text)); + + +-- +-- Name: workflowRole_role_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "workflowRole_role_index" ON public."workflowRole" USING btree ("roleId"); + + +-- +-- Name: 
workflowStatusOptionStep_toId_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "workflowStatusOptionStep_toId_idx" ON public."workflowStatusOptionStep" USING btree ("toId"); + + +-- +-- Name: workflowStatusOption_plan_index; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX "workflowStatusOption_plan_index" ON public."workflowStatusOption" USING btree ("planId"); + + +-- +-- Name: attachmentPrototype attachmentPrototype_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."attachmentPrototype" + ADD CONSTRAINT "attachmentPrototype_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON DELETE CASCADE; + + +-- +-- Name: attachmentVersion attachmentVersion_attachmentId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."attachmentVersion" + ADD CONSTRAINT "attachmentVersion_attachmentId_fkey" FOREIGN KEY ("attachmentId") REFERENCES public.attachment(id); + + +-- +-- Name: attachment attachment_attachmentPrototypeId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.attachment + ADD CONSTRAINT "attachment_attachmentPrototypeId_fkey" FOREIGN KEY ("attachmentPrototypeId") REFERENCES public."attachmentPrototype"(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: attachment attachment_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.attachment + ADD CONSTRAINT "attachment_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id); + + +-- +-- Name: authGrantLog authGrantLog_actor_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrantLog" + ADD CONSTRAINT "authGrantLog_actor_fkey" FOREIGN KEY (actor) REFERENCES public.participant(id); + + +-- +-- Name: authGrantLog authGrantLog_grantee_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrantLog" + ADD CONSTRAINT "authGrantLog_grantee_fkey" FOREIGN KEY (grantee) REFERENCES public."authGrantee"(id); + + +-- +-- Name: authGrantLog authGrantLog_target_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrantLog" + ADD CONSTRAINT "authGrantLog_target_fkey" FOREIGN KEY (target) REFERENCES public."authTarget"(id); + + +-- +-- Name: authGrant authGrant_grantee_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrant" + ADD CONSTRAINT "authGrant_grantee_fkey" FOREIGN KEY (grantee) REFERENCES public."authGrantee"(id) ON UPDATE CASCADE; + + +-- +-- Name: authGrant authGrant_target_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authGrant" + ADD CONSTRAINT "authGrant_target_fkey" FOREIGN KEY (target) REFERENCES public."authTarget"(id) ON UPDATE CASCADE; + + +-- +-- Name: authInvite authInvite_actor_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authInvite" + ADD CONSTRAINT "authInvite_actor_fkey" FOREIGN KEY (actor) REFERENCES public.participant(id); + + +-- +-- Name: authInvite authInvite_target_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authInvite" + ADD CONSTRAINT "authInvite_target_fkey" FOREIGN KEY (target) REFERENCES public."authTarget"(id); + + +-- +-- Name: authToken authToken_participant_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."authToken" + ADD CONSTRAINT 
"authToken_participant_fkey" FOREIGN KEY (participant) REFERENCES public.participant(id); + + +-- +-- Name: budgetSegmentBreakdownEntity budgetSegmentBreakdownEntity_budgetSegmentBreakdownId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegmentBreakdownEntity" + ADD CONSTRAINT "budgetSegmentBreakdownEntity_budgetSegmentBreakdownId_fkey" FOREIGN KEY ("budgetSegmentBreakdownId") REFERENCES public."budgetSegmentBreakdown"(id) ON DELETE CASCADE; + + +-- +-- Name: budgetSegmentBreakdown budgetSegmentBreakdown_budgetSegmentId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegmentBreakdown" + ADD CONSTRAINT "budgetSegmentBreakdown_budgetSegmentId_fkey" FOREIGN KEY ("budgetSegmentId") REFERENCES public."budgetSegment"(id) ON DELETE CASCADE; + + +-- +-- Name: budgetSegment budgetSegment_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."budgetSegment" + ADD CONSTRAINT "budgetSegment_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: categoryLegacy categoryLegacy_group_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."categoryLegacy" + ADD CONSTRAINT "categoryLegacy_group_fkey" FOREIGN KEY ("group") REFERENCES public."categoryGroup"(type) DEFERRABLE; + + +-- +-- Name: categoryLegacy categoryLegacy_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."categoryLegacy" + ADD CONSTRAINT "categoryLegacy_id_fkey" FOREIGN KEY (id) REFERENCES public.category(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: categoryRef categoryRef_categoryID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."categoryRef" + ADD CONSTRAINT "categoryRef_categoryID_fkey" FOREIGN KEY ("categoryID") REFERENCES public.category(id) ON DELETE CASCADE; + + +-- +-- Name: category category_group_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.category + ADD CONSTRAINT category_group_fkey FOREIGN KEY ("group") REFERENCES public."categoryGroup"(type) ON UPDATE CASCADE DEFERRABLE; + + +-- +-- Name: category category_parentID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.category + ADD CONSTRAINT "category_parentID_fkey" FOREIGN KEY ("parentID") REFERENCES public.category(id) ON UPDATE CASCADE; + + +-- +-- Name: conditionFieldReliesOn conditionFieldReliesOn_reliedOnById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionFieldReliesOn" + ADD CONSTRAINT "conditionFieldReliesOn_reliedOnById_fkey" FOREIGN KEY ("reliedOnById") REFERENCES public."conditionField"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: conditionFieldReliesOn conditionFieldReliesOn_reliesOnId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionFieldReliesOn" + ADD CONSTRAINT "conditionFieldReliesOn_reliesOnId_fkey" FOREIGN KEY ("reliesOnId") REFERENCES public."conditionField"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: conditionField conditionField_fieldType_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionField" + ADD CONSTRAINT "conditionField_fieldType_fkey" FOREIGN KEY ("fieldType") REFERENCES public."conditionFieldType"(type) ON UPDATE CASCADE 
DEFERRABLE; + + +-- +-- Name: conditionField conditionField_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."conditionField" + ADD CONSTRAINT "conditionField_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: disaggregationCategoryGroup disaggregationCategoryGroup_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationCategoryGroup" + ADD CONSTRAINT "disaggregationCategoryGroup_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON DELETE CASCADE; + + +-- +-- Name: disaggregationCategoryGroup disaggregationCategoryGroup_unitTypeId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationCategoryGroup" + ADD CONSTRAINT "disaggregationCategoryGroup_unitTypeId_fkey" FOREIGN KEY ("unitTypeId") REFERENCES public."unitType"(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: disaggregationCategory disaggregationCategory_disaggregationCategoryGroupId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationCategory" + ADD CONSTRAINT "disaggregationCategory_disaggregationCategoryGroupId_fkey" FOREIGN KEY ("disaggregationCategoryGroupId") REFERENCES public."disaggregationCategoryGroup"(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: disaggregationModel disaggregationModel_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."disaggregationModel" + ADD CONSTRAINT "disaggregationModel_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON DELETE CASCADE; + + +-- +-- Name: emergencyLocation emergencyLocation_emergencyId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."emergencyLocation" + ADD CONSTRAINT "emergencyLocation_emergencyId_fkey" FOREIGN KEY ("emergencyId") REFERENCES public.emergency(id) ON DELETE CASCADE; + + +-- +-- Name: emergencyLocation emergencyLocation_locationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."emergencyLocation" + ADD CONSTRAINT "emergencyLocation_locationId_fkey" FOREIGN KEY ("locationId") REFERENCES public.location(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: endpointLog endpointLog_participantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."endpointLog" + ADD CONSTRAINT "endpointLog_participantId_fkey" FOREIGN KEY ("participantId") REFERENCES public.participant(id) ON DELETE CASCADE; + + +-- +-- Name: entityPrototype entityPrototype_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."entityPrototype" + ADD CONSTRAINT "entityPrototype_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON DELETE CASCADE; + + +-- +-- Name: flowObject flowObject_objectType_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."flowObject" + ADD CONSTRAINT "flowObject_objectType_fkey" FOREIGN KEY ("objectType") REFERENCES public."flowObjectType"(type) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE; + + +-- +-- Name: formVersion formVersion_modifiedBy_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."formVersion" + ADD CONSTRAINT "formVersion_modifiedBy_fkey" FOREIGN KEY ("modifiedBy") REFERENCES public.participant(id); + + +-- +-- Name: formVersion 
formVersion_root_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."formVersion" + ADD CONSTRAINT "formVersion_root_fkey" FOREIGN KEY (root) REFERENCES public.form(id); + + +-- +-- Name: projectVersionComment fulfillmentComment_participantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionComment" + ADD CONSTRAINT "fulfillmentComment_participantId_fkey" FOREIGN KEY ("participantId") REFERENCES public.participant(id); + + +-- +-- Name: globalClusterAssociation globalClusterAssociation_globalClusterId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalClusterAssociation" + ADD CONSTRAINT "globalClusterAssociation_globalClusterId_fkey" FOREIGN KEY ("globalClusterId") REFERENCES public."globalCluster"(id) ON UPDATE CASCADE; + + +-- +-- Name: globalClusterAssociation globalClusterAssociation_governingEntityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."globalClusterAssociation" + ADD CONSTRAINT "globalClusterAssociation_governingEntityId_fkey" FOREIGN KEY ("governingEntityId") REFERENCES public."governingEntity"(id) ON UPDATE CASCADE; + + +-- +-- Name: governingEntityVersion governingEntityVersion_governingEntityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."governingEntityVersion" + ADD CONSTRAINT "governingEntityVersion_governingEntityId_fkey" FOREIGN KEY ("governingEntityId") REFERENCES public."governingEntity"(id); + + +-- +-- Name: governingEntity governingEntity_entityPrototypeId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."governingEntity" + ADD CONSTRAINT "governingEntity_entityPrototypeId_fkey" FOREIGN KEY ("entityPrototypeId") REFERENCES public."entityPrototype"(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: governingEntity governingEntity_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."governingEntity" + ADD CONSTRAINT "governingEntity_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON DELETE CASCADE; + + +-- +-- Name: iatiActivity iatiActivity_iatiPublisherId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiActivity" + ADD CONSTRAINT "iatiActivity_iatiPublisherId_fkey" FOREIGN KEY ("iatiPublisherId") REFERENCES public."iatiPublisher"(id); + + +-- +-- Name: iatiFTSMap iatiFTSMap_iatiActivityID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiFTSMap" + ADD CONSTRAINT "iatiFTSMap_iatiActivityID_fkey" FOREIGN KEY ("iatiActivityID") REFERENCES public."iatiActivity"(id); + + +-- +-- Name: iatiFTSMap iatiFTSMap_iatiPublisherID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiFTSMap" + ADD CONSTRAINT "iatiFTSMap_iatiPublisherID_fkey" FOREIGN KEY ("iatiPublisherID") REFERENCES public."iatiPublisher"(id); + + +-- +-- Name: iatiFTSMatch iatiFTSMatch_iatiFTSMapID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiFTSMatch" + ADD CONSTRAINT "iatiFTSMatch_iatiFTSMapID_fkey" FOREIGN KEY ("iatiFTSMapID") REFERENCES public."iatiFTSMap"(id); + + +-- +-- Name: iatiHumanitarianScope iatiHumanitarianScope_iatiActivityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiHumanitarianScope" + ADD CONSTRAINT 
"iatiHumanitarianScope_iatiActivityId_fkey" FOREIGN KEY ("iatiActivityId") REFERENCES public."iatiActivity"(id); + + +-- +-- Name: iatiParticipatingOrg iatiParticipatingOrg_iatiActivityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiParticipatingOrg" + ADD CONSTRAINT "iatiParticipatingOrg_iatiActivityId_fkey" FOREIGN KEY ("iatiActivityId") REFERENCES public."iatiActivity"(id); + + +-- +-- Name: iatiRecipientCountry iatiRecipientCountry_iatiActivityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiRecipientCountry" + ADD CONSTRAINT "iatiRecipientCountry_iatiActivityId_fkey" FOREIGN KEY ("iatiActivityId") REFERENCES public."iatiActivity"(id); + + +-- +-- Name: iatiSector iatiSector_iatiActivityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiSector" + ADD CONSTRAINT "iatiSector_iatiActivityId_fkey" FOREIGN KEY ("iatiActivityId") REFERENCES public."iatiActivity"(id); + + +-- +-- Name: iatiTransaction iatiTransaction_iatiActivityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiTransaction" + ADD CONSTRAINT "iatiTransaction_iatiActivityId_fkey" FOREIGN KEY ("iatiActivityId") REFERENCES public."iatiActivity"(id); + + +-- +-- Name: iatiTransaction iatiTransaction_iatiFTSMapId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."iatiTransaction" + ADD CONSTRAINT "iatiTransaction_iatiFTSMapId_fkey" FOREIGN KEY ("iatiFTSMapId") REFERENCES public."iatiFTSMap"(id); + + +-- +-- Name: jobAssociation jobAssociation_jobId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."jobAssociation" + ADD CONSTRAINT "jobAssociation_jobId_fkey" FOREIGN KEY ("jobId") REFERENCES public.job(id); + + +-- +-- Name: legacy legacy_objectType_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.legacy + ADD CONSTRAINT "legacy_objectType_fkey" FOREIGN KEY ("objectType") REFERENCES public."flowObjectType"(type) DEFERRABLE; + + +-- +-- Name: location location_parentId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.location + ADD CONSTRAINT "location_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES public.location(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: measurementVersion measurementVersion_measurementId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."measurementVersion" + ADD CONSTRAINT "measurementVersion_measurementId_fkey" FOREIGN KEY ("measurementId") REFERENCES public.measurement(id) ON DELETE CASCADE; + + +-- +-- Name: measurement measurement_attachmentId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.measurement + ADD CONSTRAINT "measurement_attachmentId_fkey" FOREIGN KEY ("attachmentId") REFERENCES public.attachment(id) ON DELETE CASCADE; + + +-- +-- Name: measurement measurement_planReportingPeriodId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.measurement + ADD CONSTRAINT "measurement_planReportingPeriodId_fkey" FOREIGN KEY ("planReportingPeriodId") REFERENCES public."planReportingPeriod"(id) ON DELETE CASCADE; + + +-- +-- Name: objectExclude objectExclude_objectType_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."objectExclude" + ADD CONSTRAINT "objectExclude_objectType_fkey" 
FOREIGN KEY ("objectType") REFERENCES public."flowObjectType"(type) DEFERRABLE; + + +-- +-- Name: operationClusterVersion operationClusterVersion_modifiedBy_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationClusterVersion" + ADD CONSTRAINT "operationClusterVersion_modifiedBy_fkey" FOREIGN KEY ("modifiedBy") REFERENCES public.participant(id); + + +-- +-- Name: operationClusterVersion operationClusterVersion_root_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationClusterVersion" + ADD CONSTRAINT "operationClusterVersion_root_fkey" FOREIGN KEY (root) REFERENCES public."operationCluster"(id); + + +-- +-- Name: operationVersion operationVersion_modifiedBy_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationVersion" + ADD CONSTRAINT "operationVersion_modifiedBy_fkey" FOREIGN KEY ("modifiedBy") REFERENCES public.participant(id); + + +-- +-- Name: operationVersion operationVersion_root_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."operationVersion" + ADD CONSTRAINT "operationVersion_root_fkey" FOREIGN KEY (root) REFERENCES public.operation(id); + + +-- +-- Name: organizationLocation organizationLocation_locationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."organizationLocation" + ADD CONSTRAINT "organizationLocation_locationId_fkey" FOREIGN KEY ("locationId") REFERENCES public.location(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: organizationLocation organizationLocation_organizationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."organizationLocation" + ADD CONSTRAINT "organizationLocation_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES public.organization(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: organization organization_newOrganizationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.organization + ADD CONSTRAINT "organization_newOrganizationId_fkey" FOREIGN KEY ("newOrganizationId") REFERENCES public.organization(id); + + +-- +-- Name: participantCountry participantCountry_locationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantCountry" + ADD CONSTRAINT "participantCountry_locationId_fkey" FOREIGN KEY ("locationId") REFERENCES public.location(id); + + +-- +-- Name: participantCountry participantCountry_participantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantCountry" + ADD CONSTRAINT "participantCountry_participantId_fkey" FOREIGN KEY ("participantId") REFERENCES public.participant(id) ON DELETE CASCADE; + + +-- +-- Name: participantOrganization participantOrganization_organizationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantOrganization" + ADD CONSTRAINT "participantOrganization_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES public.organization(id); + + +-- +-- Name: participantOrganization participantOrganization_participantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantOrganization" + ADD CONSTRAINT "participantOrganization_participantId_fkey" FOREIGN KEY ("participantId") REFERENCES public.participant(id) ON DELETE CASCADE; + + +-- +-- Name: participantRole 
participantRole_participantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantRole" + ADD CONSTRAINT "participantRole_participantId_fkey" FOREIGN KEY ("participantId") REFERENCES public.participant(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: participantRole participantRole_roleId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."participantRole" + ADD CONSTRAINT "participantRole_roleId_fkey" FOREIGN KEY ("roleId") REFERENCES public.role(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: planEmergency planEmergency_emergencyId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEmergency" + ADD CONSTRAINT "planEmergency_emergencyId_fkey" FOREIGN KEY ("emergencyId") REFERENCES public.emergency(id) ON DELETE CASCADE; + + +-- +-- Name: planEmergency planEmergency_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEmergency" + ADD CONSTRAINT "planEmergency_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: planEntityVersion planEntityVersion_planEntityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEntityVersion" + ADD CONSTRAINT "planEntityVersion_planEntityId_fkey" FOREIGN KEY ("planEntityId") REFERENCES public."planEntity"(id); + + +-- +-- Name: planEntity planEntity_entityPrototypeId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEntity" + ADD CONSTRAINT "planEntity_entityPrototypeId_fkey" FOREIGN KEY ("entityPrototypeId") REFERENCES public."entityPrototype"(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: planEntity planEntity_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planEntity" + ADD CONSTRAINT "planEntity_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON DELETE CASCADE; + + +-- +-- Name: planLocation planLocation_locationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planLocation" + ADD CONSTRAINT "planLocation_locationId_fkey" FOREIGN KEY ("locationId") REFERENCES public.location(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: planLocation planLocation_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planLocation" + ADD CONSTRAINT "planLocation_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: planReportingPeriod planReportingPeriod_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planReportingPeriod" + ADD CONSTRAINT "planReportingPeriod_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON DELETE CASCADE; + + +-- +-- Name: planTag planTag_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planTag" + ADD CONSTRAINT "planTag_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id); + + +-- +-- Name: planVersion planVersion_currentReportingPeriodId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planVersion" + ADD CONSTRAINT "planVersion_currentReportingPeriodId_fkey" FOREIGN KEY ("currentReportingPeriodId") REFERENCES public."planReportingPeriod"(id); + + +-- +-- Name: planVersion planVersion_planId_fkey; 
Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planVersion" + ADD CONSTRAINT "planVersion_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id); + + +-- +-- Name: planYear planYear_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planYear" + ADD CONSTRAINT "planYear_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: planYear planYear_usageYearId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."planYear" + ADD CONSTRAINT "planYear_usageYearId_fkey" FOREIGN KEY ("usageYearId") REFERENCES public."usageYear"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: procedureEntityPrototype procedureEntityPrototype_entityPrototypeId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureEntityPrototype" + ADD CONSTRAINT "procedureEntityPrototype_entityPrototypeId_fkey" FOREIGN KEY ("entityPrototypeId") REFERENCES public."entityPrototype"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: procedureEntityPrototype procedureEntityPrototype_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureEntityPrototype" + ADD CONSTRAINT "procedureEntityPrototype_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: procedureSectionField procedureSectionField_conditionFieldId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureSectionField" + ADD CONSTRAINT "procedureSectionField_conditionFieldId_fkey" FOREIGN KEY ("conditionFieldId") REFERENCES public."conditionField"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: procedureSectionField procedureSectionField_procedureSectionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureSectionField" + ADD CONSTRAINT "procedureSectionField_procedureSectionId_fkey" FOREIGN KEY ("procedureSectionId") REFERENCES public."procedureSection"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: procedureSection procedureSection_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."procedureSection" + ADD CONSTRAINT "procedureSection_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: projectVersionAttachment projectAttachment_attachmentId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionAttachment" + ADD CONSTRAINT "projectAttachment_attachmentId_fkey" FOREIGN KEY ("attachmentId") REFERENCES public.attachment(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionAttachment projectAttachment_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionAttachment" + ADD CONSTRAINT "projectAttachment_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectContact projectContact_participantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectContact" + ADD CONSTRAINT "projectContact_participantId_fkey" FOREIGN KEY ("participantId") REFERENCES public.participant(id) ON UPDATE CASCADE; + + +-- +-- Name: projectContact 
projectContact_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectContact" + ADD CONSTRAINT "projectContact_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionField projectField_conditionFieldId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionField" + ADD CONSTRAINT "projectField_conditionFieldId_fkey" FOREIGN KEY ("conditionFieldId") REFERENCES public."conditionField"(id) ON UPDATE CASCADE ON DELETE RESTRICT; + + +-- +-- Name: projectGlobalClusters projectGlobalClusters_globalClusterId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectGlobalClusters" + ADD CONSTRAINT "projectGlobalClusters_globalClusterId_fkey" FOREIGN KEY ("globalClusterId") REFERENCES public."globalCluster"(id) ON DELETE CASCADE; + + +-- +-- Name: projectGlobalClusters projectGlobalClusters_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectGlobalClusters" + ADD CONSTRAINT "projectGlobalClusters_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionGoverningEntity projectGoverningEntities_governingEntityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionGoverningEntity" + ADD CONSTRAINT "projectGoverningEntities_governingEntityId_fkey" FOREIGN KEY ("governingEntityId") REFERENCES public."governingEntity"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionGoverningEntity projectGoverningEntities_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionGoverningEntity" + ADD CONSTRAINT "projectGoverningEntities_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectLocations projectLocations_locationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectLocations" + ADD CONSTRAINT "projectLocations_locationId_fkey" FOREIGN KEY ("locationId") REFERENCES public.location(id) ON DELETE CASCADE; + + +-- +-- Name: projectLocations projectLocations_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectLocations" + ADD CONSTRAINT "projectLocations_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionOrganization projectOrganizations_organizationId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionOrganization" + ADD CONSTRAINT "projectOrganizations_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES public.organization(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionOrganization projectOrganizations_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionOrganization" + ADD CONSTRAINT "projectOrganizations_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionPlanEntity projectPlanEntities_planEntityId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public."projectVersionPlanEntity" + ADD CONSTRAINT "projectPlanEntities_planEntityId_fkey" FOREIGN KEY ("planEntityId") REFERENCES public."planEntity"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionPlanEntity projectPlanEntities_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlanEntity" + ADD CONSTRAINT "projectPlanEntities_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionAttachment projectVersionAttachment_attachmentVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionAttachment" + ADD CONSTRAINT "projectVersionAttachment_attachmentVersionId_fkey" FOREIGN KEY ("attachmentVersionId") REFERENCES public."attachmentVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionComment projectVersionComment_projectVersionPlanId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionComment" + ADD CONSTRAINT "projectVersionComment_projectVersionPlanId_fkey" FOREIGN KEY ("projectVersionPlanId") REFERENCES public."projectVersionPlan"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionField projectVersionField_projectVersionPlanId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionField" + ADD CONSTRAINT "projectVersionField_projectVersionPlanId_fkey" FOREIGN KEY ("projectVersionPlanId") REFERENCES public."projectVersionPlan"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionGoverningEntity projectVersionGoverningEntity_governingEntityVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionGoverningEntity" + ADD CONSTRAINT "projectVersionGoverningEntity_governingEntityVersionId_fkey" FOREIGN KEY ("governingEntityVersionId") REFERENCES public."governingEntityVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionHistory projectVersionHistory_participantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionHistory" + ADD CONSTRAINT "projectVersionHistory_participantId_fkey" FOREIGN KEY ("participantId") REFERENCES public.participant(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: projectVersionHistory projectVersionHistory_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionHistory" + ADD CONSTRAINT "projectVersionHistory_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: projectVersionPlanEntity projectVersionPlanEntity_planEntityVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlanEntity" + ADD CONSTRAINT "projectVersionPlanEntity_planEntityVersionId_fkey" FOREIGN KEY ("planEntityVersionId") REFERENCES public."planEntityVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersionPlan projectVersionPlan_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlan" + ADD CONSTRAINT "projectVersionPlan_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: projectVersionPlan projectVersionPlan_projectVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres 
+-- + +ALTER TABLE ONLY public."projectVersionPlan" + ADD CONSTRAINT "projectVersionPlan_projectVersionId_fkey" FOREIGN KEY ("projectVersionId") REFERENCES public."projectVersion"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: projectVersionPlan projectVersionPlan_workflowStatusOptionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersionPlan" + ADD CONSTRAINT "projectVersionPlan_workflowStatusOptionId_fkey" FOREIGN KEY ("workflowStatusOptionId") REFERENCES public."workflowStatusOption"(id) ON DELETE CASCADE; + + +-- +-- Name: projectVersion projectVersion_projectId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."projectVersion" + ADD CONSTRAINT "projectVersion_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES public.project(id) ON DELETE CASCADE; + + +-- +-- Name: project project_creatorParticipantId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.project + ADD CONSTRAINT "project_creatorParticipantId_fkey" FOREIGN KEY ("creatorParticipantId") REFERENCES public.participant(id); + + +-- +-- Name: project project_currentPublishedVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.project + ADD CONSTRAINT "project_currentPublishedVersionId_fkey" FOREIGN KEY ("currentPublishedVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: project project_latestVersionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.project + ADD CONSTRAINT "project_latestVersionId_fkey" FOREIGN KEY ("latestVersionId") REFERENCES public."projectVersion"(id) ON DELETE CASCADE; + + +-- +-- Name: reportDetail reportDetail_organizationID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportDetail" + ADD CONSTRAINT "reportDetail_organizationID_fkey" FOREIGN KEY ("organizationID") REFERENCES public.organization(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: reportFile reportFile_fileAssetID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportFile" + ADD CONSTRAINT "reportFile_fileAssetID_fkey" FOREIGN KEY ("fileAssetID") REFERENCES public."fileAssetEntity"(id) ON UPDATE CASCADE; + + +-- +-- Name: reportFile reportFile_reportID_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportFile" + ADD CONSTRAINT "reportFile_reportID_fkey" FOREIGN KEY ("reportID") REFERENCES public."reportDetail"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: reportingWindowAssignmentVersion reportingWindowAssignmentVersion_modifiedBy_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowAssignmentVersion" + ADD CONSTRAINT "reportingWindowAssignmentVersion_modifiedBy_fkey" FOREIGN KEY ("modifiedBy") REFERENCES public.participant(id); + + +-- +-- Name: reportingWindowAssignmentVersion reportingWindowAssignmentVersion_root_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowAssignmentVersion" + ADD CONSTRAINT "reportingWindowAssignmentVersion_root_fkey" FOREIGN KEY (root) REFERENCES public."reportingWindowAssignment"(id); + + +-- +-- Name: reportingWindowVersion reportingWindowVersion_modifiedBy_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowVersion" + ADD 
CONSTRAINT "reportingWindowVersion_modifiedBy_fkey" FOREIGN KEY ("modifiedBy") REFERENCES public.participant(id); + + +-- +-- Name: reportingWindowVersion reportingWindowVersion_root_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."reportingWindowVersion" + ADD CONSTRAINT "reportingWindowVersion_root_fkey" FOREIGN KEY (root) REFERENCES public."reportingWindow"(id); + + +-- +-- Name: roleAuthenticationKey roleAuthenticationKey_roleId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."roleAuthenticationKey" + ADD CONSTRAINT "roleAuthenticationKey_roleId_fkey" FOREIGN KEY ("roleId") REFERENCES public.role(id); + + +-- +-- Name: rolePermittedAction rolePermittedAction_permittedActionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."rolePermittedAction" + ADD CONSTRAINT "rolePermittedAction_permittedActionId_fkey" FOREIGN KEY ("permittedActionId") REFERENCES public."permittedAction"(id); + + +-- +-- Name: rolePermittedAction rolePermittedAction_roleId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."rolePermittedAction" + ADD CONSTRAINT "rolePermittedAction_roleId_fkey" FOREIGN KEY ("roleId") REFERENCES public.role(id); + + +-- +-- Name: task task_requester_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.task + ADD CONSTRAINT task_requester_fkey FOREIGN KEY (requester) REFERENCES public.participant(id); + + +-- +-- Name: unit unit_unitTypeId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.unit + ADD CONSTRAINT "unit_unitTypeId_fkey" FOREIGN KEY ("unitTypeId") REFERENCES public."unitType"(id) ON UPDATE CASCADE ON DELETE SET NULL; + + +-- +-- Name: workflowRole workflowRole_roleId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowRole" + ADD CONSTRAINT "workflowRole_roleId_fkey" FOREIGN KEY ("roleId") REFERENCES public.role(id) ON UPDATE CASCADE; + + +-- +-- Name: workflowStatusOptionStep workflowStatusOptionStep_fromId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOptionStep" + ADD CONSTRAINT "workflowStatusOptionStep_fromId_fkey" FOREIGN KEY ("fromId") REFERENCES public."workflowStatusOption"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: workflowStatusOptionStep workflowStatusOptionStep_toId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOptionStep" + ADD CONSTRAINT "workflowStatusOptionStep_toId_fkey" FOREIGN KEY ("toId") REFERENCES public."workflowStatusOption"(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- Name: workflowStatusOption workflowStatusOption_planId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public."workflowStatusOption" + ADD CONSTRAINT "workflowStatusOption_planId_fkey" FOREIGN KEY ("planId") REFERENCES public.plan(id) ON UPDATE CASCADE ON DELETE CASCADE; + + +-- +-- PostgreSQL database dump complete +-- From b23d838be3f29237f6a7e9647a389a8fb546a12d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 26 Oct 2023 08:47:48 +0200 Subject: [PATCH 05/67] Add utils for tests --- tests/testContext.ts | 63 +++++++++++++++++++++++++++++++++++++++ tests/utils/connection.ts | 46 ++++++++++++++++++++++++++++ tests/utils/server.ts | 62 ++++++++++++++++++++++++++++++++++++++ tsconfig.json | 3 +- 4 files changed, 173 
insertions(+), 1 deletion(-) create mode 100644 tests/testContext.ts create mode 100644 tests/utils/connection.ts create mode 100644 tests/utils/server.ts diff --git a/tests/testContext.ts b/tests/testContext.ts new file mode 100644 index 00000000..87a7f4e8 --- /dev/null +++ b/tests/testContext.ts @@ -0,0 +1,63 @@ +import v4Models, { type Database } from '@unocha/hpc-api-core/src/db'; +import { type ApolloServer } from 'apollo-server-hapi'; +import type Knex from 'knex'; +import { createDbConnetion } from './utils/connection'; +import createApolloTestServer from './utils/server'; + +interface IContext { + models?: Database; + conn: Knex; + transactions: Record; + apolloTestServer: ApolloServer; +} + +export default class ContextProvider implements IContext { + private static _instance: ContextProvider; + + models?: Database; + conn: Knex; + transactions: Record; + apolloTestServer: ApolloServer; + + private constructor() { + this.models = {} as Database; + this.transactions = {}; + this.conn = {} as Knex; + this.apolloTestServer = {} as ApolloServer; + } + + public static get Instance(): ContextProvider { + if (this._instance) { + return this._instance; + } + this._instance = new ContextProvider(); + return this._instance; + } + + public async setUpContext(): Promise { + const connection = await this.createDbTestConection(); + this.conn = connection; + this.models = v4Models(this.conn); + this.apolloTestServer = await createApolloTestServer( + this.conn, + this.models + ); + } + + private async createDbTestConection(): Promise> { + return await createDbConnetion({ + db: { + poolMin: 1, + poolMax: 1, + poolIdle: 1, + connection: { + host: 'localhost', + port: 6432, + user: 'postgres', + password: 'test', + database: 'test', + }, + }, + }); + } +} diff --git a/tests/utils/connection.ts b/tests/utils/connection.ts new file mode 100644 index 00000000..cd1b1dd4 --- /dev/null +++ b/tests/utils/connection.ts @@ -0,0 +1,46 @@ +import * as t from 'io-ts'; +import Knex from 'knex'; + +const CONFIG = t.type({ + db: t.type({ + poolMin: t.number, + poolMax: t.number, + connection: t.type({ + host: t.string, + port: t.number, + user: t.string, + password: t.string, + database: t.string, + }), + poolIdle: t.number, + }), +}); + +/** + * Initialize a new Postgres provider + */ +export async function createDbConnetion(config: t.TypeOf) { + const knex = Knex({ + client: 'pg', + connection: config.db.connection, + pool: { + min: config.db.poolMin, + max: config.db.poolMax, + idleTimeoutMillis: config.db.poolIdle, + }, + acquireConnectionTimeout: 2000, + }); + + // Verify the connection before proceeding + try { + await knex.raw('SELECT now()'); + + return knex; + } catch { + throw new Error( + 'Unable to connect to Postgres via Knex. Ensure a valid connection.' 
+ ); + } +} + +export default { createDbConnetion }; diff --git a/tests/utils/server.ts b/tests/utils/server.ts new file mode 100644 index 00000000..b18161bd --- /dev/null +++ b/tests/utils/server.ts @@ -0,0 +1,62 @@ +import * as Hapi from '@hapi/hapi'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { ApolloServerPluginLandingPageGraphQLPlayground } from 'apollo-server-core'; +import { + ApolloServer, + ApolloServerPluginStopHapiServer, +} from 'apollo-server-hapi'; +import type Knex from 'knex'; +import PlatformPath from 'node:path'; +import 'reflect-metadata'; +import { buildSchema } from 'type-graphql'; +import { Container } from 'typedi'; +import { CONFIG } from '../../config'; +import { getTokenFromRequest } from '../../src/common-libs/auth'; + +export default async function createApolloTestServer( + connection: Knex, + models: Database, + auth?: boolean +) { + const schema = await buildSchema({ + resolvers: [ + PlatformPath.join( + __dirname, + '../../src/domain-services/**/resolver.{ts,js}' + ), + ], + container: Container, // Register the 3rd party IOC container + }); + + const hapiServer = Hapi.server({ + port: CONFIG.httpPort, + app: { + config: CONFIG, + connection, + }, + }); + + const apolloServerConfig = { + connection, + models, + config: CONFIG, + }; + + return new ApolloServer({ + schema, + context: ({ request }: { request: Hapi.Request }) => ({ + ...apolloServerConfig, + token: auth ? getTokenFromRequest(request) : undefined, + }), + plugins: [ + ApolloServerPluginStopHapiServer({ hapiServer }), + /** + * Don't use sandbox explorer hosted on https://studio.apollographql.com + * but use local sandbox instead. Even though GraphQL playground is + * retired, it is much more useful for local development + * https://github.com/graphql/graphql-playground/issues/1143 + */ + ApolloServerPluginLandingPageGraphQLPlayground(), + ], + }); +} diff --git a/tsconfig.json b/tsconfig.json index d39d5a01..d4caaf1b 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -22,7 +22,8 @@ "src/**/*.js", "start.js", "bin", - "jest.config.ts" + "jest.config.ts", + "tests/**/*.ts" ], "exclude": ["node_modules"] } From 348a653fa9d282cac231c991a8d3a90a157a8a6a Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 26 Oct 2023 08:50:28 +0200 Subject: [PATCH 06/67] Add tests setup and example tests --- tests/resolvers/software-info.spec.ts | 78 +++++++++++++++++++++++++++ tests/test-environment-setup.ts | 5 ++ tests/test-environment.spec.ts | 30 +++++++++++ 3 files changed, 113 insertions(+) create mode 100644 tests/resolvers/software-info.spec.ts create mode 100644 tests/test-environment-setup.ts create mode 100644 tests/test-environment.spec.ts diff --git a/tests/resolvers/software-info.spec.ts b/tests/resolvers/software-info.spec.ts new file mode 100644 index 00000000..0e051baf --- /dev/null +++ b/tests/resolvers/software-info.spec.ts @@ -0,0 +1,78 @@ +// For clarity in this example we included our typeDefs and resolvers above our test, + +import { version } from '../../package.json'; +import ContextProvider from '../testContext'; + +describe('Query should return Software info', () => { + it('All data should be returned', async () => { + const response = + await ContextProvider.Instance.apolloTestServer.executeOperation({ + query: 'query { softwareInfo { title status version } }', + }); + + expect(response).toBeDefined(); + expect(response.errors).toBeUndefined(); + expect(response.data).toBeDefined(); + const data = response.data as any; + expect(data.softwareInfo).toBeDefined(); + 
expect(data.softwareInfo.length).toBeGreaterThan(0); + const softwareInfo = data.softwareInfo[0]; + expect(softwareInfo.version).toBe(version); + expect(softwareInfo.title).toBeDefined(); + expect(softwareInfo.status).toBeDefined(); + }); + + it('Only version should be returned', async () => { + const response = + await ContextProvider.Instance.apolloTestServer.executeOperation({ + query: 'query { softwareInfo { version } }', + }); + + expect(response).toBeDefined(); + expect(response.errors).toBeUndefined(); + expect(response.data).toBeDefined(); + const data = response.data as any; + expect(data.softwareInfo).toBeDefined(); + expect(data.softwareInfo.length).toBeGreaterThan(0); + const softwareInfo = data.softwareInfo[0]; + expect(softwareInfo.version).toBe(version); + expect(softwareInfo.title).toBeUndefined(); + expect(softwareInfo.status).toBeUndefined(); + }); + + it('Only title should be returned', async () => { + const response = + await ContextProvider.Instance.apolloTestServer.executeOperation({ + query: 'query { softwareInfo { title } }', + }); + + expect(response).toBeDefined(); + expect(response.errors).toBeUndefined(); + expect(response.data).toBeDefined(); + const data = response.data as any; + expect(data.softwareInfo).toBeDefined(); + expect(data.softwareInfo.length).toBeGreaterThan(0); + const softwareInfo = data.softwareInfo[0]; + expect(softwareInfo.title).toBeDefined(); + expect(softwareInfo.version).toBeUndefined(); + expect(softwareInfo.status).toBeUndefined(); + }); + + it('Only status should be returned', async () => { + const response = + await ContextProvider.Instance.apolloTestServer.executeOperation({ + query: 'query { softwareInfo { status } }', + }); + + expect(response).toBeDefined(); + expect(response.errors).toBeUndefined(); + expect(response.data).toBeDefined(); + const data = response.data as any; + expect(data.softwareInfo).toBeDefined(); + expect(data.softwareInfo.length).toBeGreaterThan(0); + const softwareInfo = data.softwareInfo[0]; + expect(softwareInfo.status).toBeDefined(); + expect(softwareInfo.version).toBeUndefined(); + expect(softwareInfo.title).toBeUndefined(); + }); +}); diff --git a/tests/test-environment-setup.ts b/tests/test-environment-setup.ts new file mode 100644 index 00000000..f6d566c5 --- /dev/null +++ b/tests/test-environment-setup.ts @@ -0,0 +1,5 @@ +import ContextProvider from './testContext'; + +beforeAll(async () => { + await ContextProvider.Instance.setUpContext(); +}); diff --git a/tests/test-environment.spec.ts b/tests/test-environment.spec.ts new file mode 100644 index 00000000..1d237e80 --- /dev/null +++ b/tests/test-environment.spec.ts @@ -0,0 +1,30 @@ +import ContextProvider from './testContext'; + +const context = ContextProvider.Instance; + +describe('Ensure test Environment is setup correctly', () => { + it('should be able to run a test', () => { + expect(true).toBe(true); + }); +}); + +describe('ContextProvider should be defined', () => { + it('should be defined', () => { + expect(context).toBeDefined(); + }); + + it('should have a connection', () => { + expect(context.conn).toBeDefined(); + }); + + it('connection should be a knex connection', () => { + expect(context.conn).toBeDefined(); + expect(context.conn?.client.config.client).toEqual('pg'); + }); + + it('knex connection should be connected', async () => { + expect(context.conn).toBeDefined(); + const res = await context.conn?.raw('SELECT NOW()'); + expect(res.rows[0].now).toBeDefined(); + }); +}); From cbe05c06ec37292ae311a1d2786229f91acc4cdd Mon Sep 17 
00:00:00 2001 From: manelcecs Date: Thu, 26 Oct 2023 08:54:18 +0200 Subject: [PATCH 07/67] Add run files for VSCode --- .gitignore | 1 - .vscode/launch.json | 18 ++++++++++++++++++ .vscode/tasks.json | 13 +++++++++++++ bin/test.sh | 22 +++++++++++++--------- 4 files changed, 44 insertions(+), 10 deletions(-) create mode 100644 .vscode/launch.json create mode 100644 .vscode/tasks.json diff --git a/.gitignore b/.gitignore index ab4a50b5..07f01588 100644 --- a/.gitignore +++ b/.gitignore @@ -17,6 +17,5 @@ package-lock.json test-data/* data/* .env -.vscode *.sublime-project *.sublime-workspace diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..7bb1de3a --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,18 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Debug Jest Tests", + "type": "node", + "preLaunchTask": "start-containers", + "request": "launch", + "runtimeArgs": [ + "--inspect-brk", + "${workspaceRoot}/node_modules/.bin/jest", + "--runInBand" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen" + } + ] +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..7130fa7c --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,13 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=733558 + // for the documentation about the tasks.json format + "version": "2.0.0", + "tasks": [ + { + "label": "start-containers", + "type": "shell", + "command": "${workspaceRoot}/bin/test.sh", + "args": ["-oc"] + } + ] +} diff --git a/bin/test.sh b/bin/test.sh index b2dc31ac..78e62e65 100755 --- a/bin/test.sh +++ b/bin/test.sh @@ -1,12 +1,12 @@ root=$(pwd) #Global variables -USAGE='this is the usage' -DEBUG_USAGE='this is the debug usage' +USAGE='Usage: test.sh [options] [-- [options]].\n Options:\n -oc, --only-containers: only start docker containers\n -sc, --stop-containers: stop docker containers\n -k, --keep: keep jest runing after the completion of the tests suites\n -c: run tests with coverage\n -h, --help: show this help message\n --: pass extra options' KEEP=0 +FORCE_STOP_JEST='--forceExit' ONLY_CONTAINERS=0 STOP_CONTAINERSq=0 -COMMAND_ARGS='--' +COMMAND_ARGS='' function moveToTestDir { echo 'Moving to tests dir' @@ -21,22 +21,21 @@ function moveToRootDir { ## obtain options while [ "$1" != "" ]; do case $1 in - -d | --debug ) echo "Debug usage" - echo "$DEBUG_USAGE" - exit 0 - ;; -oc | --only-containers ) ONLY_CONTAINERS=1 ;; -sc | --stop-containers ) STOP_CONTAINERS=1 ;; -k | --keep ) KEEP=1 ;; + -c) shift + COMMAND_ARGS="${COMMAND_ARGS} --coverage" + ;; -h | --help ) echo "$USAGE" exit ;; --) shift while [ "$1" != "" ]; do - COMMAND_ARGS="${COMMAND_ARGS} $1" + COMMAND_ARGS="${COMMAND_ARGS} -- $1" shift done ;; @@ -73,7 +72,12 @@ fi ## run tests echo 'Running tests' moveToRootDir -yarn jest + +if [ $KEEP -eq 0 ]; then + FORCE_STOP_JEST='' +fi + +yarn jest $COMMAND_ARGS $FORCE_STOP_JEST if [ $KEEP -eq 0 ]; then ## stop docker containers From e999d1635a739fc3b0ed725ebe7009c3947cc014 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 17 Oct 2023 08:48:04 +0200 Subject: [PATCH 08/67] Add hpc-hid info --- tools/hid_api_mock/package.json | 1 + tools/hid_api_mock/server.js | 1 + 2 files changed, 2 insertions(+) create mode 120000 tools/hid_api_mock/package.json create mode 120000 tools/hid_api_mock/server.js diff --git a/tools/hid_api_mock/package.json b/tools/hid_api_mock/package.json new file mode 120000 index 00000000..56c37479 --- /dev/null +++ b/tools/hid_api_mock/package.json @@ -0,0 +1 @@ 
+/srv/example/package.json \ No newline at end of file diff --git a/tools/hid_api_mock/server.js b/tools/hid_api_mock/server.js new file mode 120000 index 00000000..e913c5a8 --- /dev/null +++ b/tools/hid_api_mock/server.js @@ -0,0 +1 @@ +/srv/example/server.js \ No newline at end of file From 1e54f71db3ebd9f3a7e4411f3dfbae5ecac27ab5 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 23 Oct 2023 08:40:03 +0200 Subject: [PATCH 09/67] Create basic structure for GraphQL Flows resolver Add test file for flow resolver --- src/domain-services/flows/flow-service.ts | 113 ++++++++++++++++++ src/domain-services/flows/graphql/resolver.ts | 49 ++++++++ src/domain-services/flows/graphql/types.ts | 110 +++++++++++++++++ tests/resolvers/flows.spec.ts | 22 ++++ 4 files changed, 294 insertions(+) create mode 100644 src/domain-services/flows/flow-service.ts create mode 100644 src/domain-services/flows/graphql/resolver.ts create mode 100644 src/domain-services/flows/graphql/types.ts create mode 100644 tests/resolvers/flows.spec.ts diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts new file mode 100644 index 00000000..58db60f9 --- /dev/null +++ b/src/domain-services/flows/flow-service.ts @@ -0,0 +1,113 @@ +import { Service } from 'typedi'; +import { FlowSearchResult, FlowSortField } from './graphql/types'; +import { Database } from '@unocha/hpc-api-core/src/db/type'; +import { Brand, createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +@Service() +export class FlowService { + async search( + models: Database, + first: number, + afterCursor: string, + sortField: FlowSortField, + sortOrder: 'asc' | 'desc' + ): Promise { + let afterIndex = 0; + + const sortCondition = { + column: sortField ?? 'id', + order: sortOrder ?? 'ASC', + }; + + let flows = await models.flow.find({ + orderBy: sortCondition, + }); + const count = flows.length; + + if (afterCursor) { + const after = flows.findIndex( + (flow) => flow.id.toString() === afterCursor + ); + if (after < 0) { + throw new Error('Cursor not found'); + } + afterIndex = after + 1; + } + + const pagedData = flows.slice(afterIndex, afterIndex + first); + const edges = await Promise.all( + pagedData.map(async (flow) => { + const categories: string[] = await this.getFlowCategories(flow, models); + + return { + node: { + id: flow.id.valueOf(), + amountUSD: flow.amountUSD.toString(), + createdAt: flow.createdAt, + category: categories[0], + }, + cursor: flow.id.toString(), + }; + }) + ); + + return { + edges, + pageInfo: { + hasNextPage: count > first, + hasPreviousPage: afterIndex > 0, + startCursor: pagedData.length ? pagedData[0].id.toString() : '', + endCursor: pagedData.length + ? 
pagedData[pagedData.length - 1].id.toString() + : '', + pageSize: pagedData.length, + sortField: sortCondition.column, + sortOrder: sortCondition.order, + }, + totalCount: count, + }; + } + + private async getFlowTypeCategory(models: Database): Promise { + return models.category.find({ + where: { group: 'flowType', name: 'Parked' }, + }); + } + + private async getFlowCategories(flow: any, models: Database): Promise { + const flowIdBranded = createBrandedValue(flow.id); + const flowLinks = await models.flowLink.find({ + where: { + childID: flowIdBranded, + }, + }); + + //const flowTypeCategory = await this.getFlowTypeCategory(models); + const flowLinksBrandedIds = flowLinks.map((flowLink) => + createBrandedValue(flowLink.parentID) + ); + + const categoriesRef = await models.categoryRef.find({ + where: { + objectID: { + [Op.IN]: flowLinksBrandedIds, + }, + versionID: flow.versionID, + categoryID: createBrandedValue(1051), + }, + }); + + const categories = await models.category.find({ + where: { + id: { + [Op.IN]: categoriesRef.map((catRef) => catRef.categoryID), + }, + }, + }); + + return categories.map((cat) => { + return cat.name; + }); + } +} diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts new file mode 100644 index 00000000..baebb962 --- /dev/null +++ b/src/domain-services/flows/graphql/resolver.ts @@ -0,0 +1,49 @@ +import Flow, { FlowSearchResult, FlowSortField } from './types'; +import { Service } from 'typedi'; +import { Arg, Args, Ctx, Query, Resolver } from 'type-graphql'; +import { FlowService } from '../flow-service'; +import Context from '../../Context'; + +@Service() +@Resolver(Flow) +export default class FlowResolver { + constructor(private flowService: FlowService) {} + + @Query(() => FlowSearchResult) + async searchFlows( + @Ctx() context: Context, + @Arg('first', { nullable: false }) first: number, + @Arg('afterCursor', { nullable: true }) afterCursor: string, + @Arg('sortField', { nullable: true }) + sortField: + | 'id' + | 'amountUSD' + | 'versionID' + | 'activeStatus' + | 'restricted' + | 'newMoney' + | 'flowDate' + | 'decisionDate' + | 'firstReportedDate' + | 'budgetYear' + | 'origAmount' + | 'origCurrency' + | 'exchangeRate' + | 'description' + | 'notes' + | 'versionStartDate' + | 'versionEndDate' + | 'createdAt' + | 'updatedAt' + | 'deletedAt', + @Arg('sortOrder', { nullable: true }) sortOrder: 'asc' | 'desc' + ): Promise { + return await this.flowService.search( + context.models, + first, + afterCursor, + sortField, + sortOrder + ); + } +} diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts new file mode 100644 index 00000000..c99642c0 --- /dev/null +++ b/src/domain-services/flows/graphql/types.ts @@ -0,0 +1,110 @@ +import { Field, ObjectType } from 'type-graphql'; + +@ObjectType() +export default class Flow { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + amountUSD: string; + + @Field({ nullable: false }) + createdAt: Date; + + @Field({ nullable: true }) + category: string; +} + +@ObjectType() +export class FlowCategory { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + name: string; +} + +@ObjectType() +export class FlowEdge { + @Field({ nullable: false }) + node: Flow; + + @Field({ nullable: false }) + cursor: string; +} +@ObjectType() +export class PageInfo { + @Field({ nullable: false }) + hasNextPage: boolean; + + @Field({ nullable: false }) + hasPreviousPage: boolean; + + @Field({ nullable: false 
}) + startCursor: string; + + @Field({ nullable: false }) + endCursor: string; + + @Field({ nullable: false }) + pageSize: number; + + @Field({ nullable: false }) + sortField: + | 'id' + | 'amountUSD' + | 'versionID' + | 'activeStatus' + | 'restricted' + | 'newMoney' + | 'flowDate' + | 'decisionDate' + | 'firstReportedDate' + | 'budgetYear' + | 'origAmount' + | 'origCurrency' + | 'exchangeRate' + | 'description' + | 'notes' + | 'versionStartDate' + | 'versionEndDate' + | 'createdAt' + | 'updatedAt' + | 'deletedAt'; + + @Field({ nullable: false }) + sortOrder: string; +} +@ObjectType() +export class FlowSearchResult { + @Field(() => [FlowEdge], { nullable: false }) + edges: FlowEdge[]; + + @Field(() => PageInfo, { nullable: false }) + pageInfo: PageInfo; + + @Field({ nullable: false }) + totalCount: number; +} + +export type FlowSortField = + | 'id' + | 'amountUSD' + | 'versionID' + | 'activeStatus' + | 'restricted' + | 'newMoney' + | 'flowDate' + | 'decisionDate' + | 'firstReportedDate' + | 'budgetYear' + | 'origAmount' + | 'origCurrency' + | 'exchangeRate' + | 'description' + | 'notes' + | 'versionStartDate' + | 'versionEndDate' + | 'createdAt' + | 'updatedAt' + | 'deletedAt'; diff --git a/tests/resolvers/flows.spec.ts b/tests/resolvers/flows.spec.ts new file mode 100644 index 00000000..53f9957d --- /dev/null +++ b/tests/resolvers/flows.spec.ts @@ -0,0 +1,22 @@ +import ContextProvider from '../testContext'; + + +describe('Query should return Flow search', () => { + + it('All data should be returned', async () => { + const response = await ContextProvider.Instance.apolloTestServer.executeOperation({ + query: 'query { searchFlows (first:10) { totalCount edges { node { id createdAt amountUSD category } cursor } pageInfo { startCursor hasNextPage endCursor hasPreviousPage pageSize } } }', + + }); + + expect(response).toBeDefined(); + expect(response.errors).toBeUndefined(); + expect(response.data).toBeDefined(); + const data = response.data as any; + expect(data.flows).toBeDefined(); + expect(data.flows.length).toBeGreaterThan(0); + const flows = data.flows[0]; + expect(flows.id).toBeDefined(); + }); +}); + From ef9b56cf947082d60b47326cd59602ef0e03f842 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 26 Oct 2023 11:12:11 +0200 Subject: [PATCH 10/67] Minor refactor --- src/domain-services/flows/flow-service.ts | 10 ++++++++-- src/server.ts | 6 +++++- tests/testContext.ts | 6 +++--- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 58db60f9..514d5ed6 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -4,6 +4,8 @@ import { Database } from '@unocha/hpc-api-core/src/db/type'; import { Brand, createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; + +import { dbConnection } from '../../server'; @Service() export class FlowService { async search( @@ -22,8 +24,9 @@ export class FlowService { let flows = await models.flow.find({ orderBy: sortCondition, + limit: first, }); - const count = flows.length; + const count = await dbConnection.raw('SELECT COUNT(*) FROM flow'); if (afterCursor) { const after = flows.findIndex( @@ -75,7 +78,10 @@ export class FlowService { }); } - private async getFlowCategories(flow: any, models: Database): Promise { + private async getFlowCategories( + flow: any, + models: 
Database + ): Promise { const flowIdBranded = createBrandedValue(flow.id); const flowLinks = await models.flowLink.find({ where: { diff --git a/src/server.ts b/src/server.ts index ca85de88..059eea0c 100644 --- a/src/server.ts +++ b/src/server.ts @@ -30,6 +30,8 @@ declare module '@hapi/hapi' { } } +let dbConnection: Knex; + async function startServer() { const rootLogContext = await initializeLogging(); @@ -38,7 +40,7 @@ async function startServer() { container: Container, // Register the 3rd party IOC container }); - const dbConnection = await createDbConnetion(); + dbConnection = await createDbConnetion(); const hapiServer = Hapi.server({ port: CONFIG.httpPort, @@ -79,3 +81,5 @@ async function startServer() { } startServer().catch((error) => console.error(error)); + +export { dbConnection }; diff --git a/tests/testContext.ts b/tests/testContext.ts index 87a7f4e8..8248521a 100644 --- a/tests/testContext.ts +++ b/tests/testContext.ts @@ -52,10 +52,10 @@ export default class ContextProvider implements IContext { poolIdle: 1, connection: { host: 'localhost', - port: 6432, + port: 5432, user: 'postgres', - password: 'test', - database: 'test', + password: '', + database: 'hpc', }, }, }); From 39a1fc8abe4efda90f909236962c57e594dc7337 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 27 Oct 2023 10:51:27 +0200 Subject: [PATCH 11/67] Add fetch for flow categories --- src/domain-services/flows/flow-service.ts | 34 +++++++++------------- src/domain-services/flows/graphql/types.ts | 20 ++++++++----- 2 files changed, 26 insertions(+), 28 deletions(-) diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 514d5ed6..a4110c25 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -1,11 +1,10 @@ import { Service } from 'typedi'; -import { FlowSearchResult, FlowSortField } from './graphql/types'; +import { FlowCategory, FlowSearchResult, FlowSortField } from './graphql/types'; import { Database } from '@unocha/hpc-api-core/src/db/type'; import { Brand, createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { dbConnection } from '../../server'; @Service() export class FlowService { async search( @@ -19,14 +18,15 @@ export class FlowService { const sortCondition = { column: sortField ?? 'id', - order: sortOrder ?? 'ASC', + order: sortOrder ?? 'DESC', }; let flows = await models.flow.find({ orderBy: sortCondition, limit: first, }); - const count = await dbConnection.raw('SELECT COUNT(*) FROM flow'); + const countRes = await models.flow.count(); + const count = countRes[0] as { count: number }; if (afterCursor) { const after = flows.findIndex( @@ -41,14 +41,14 @@ export class FlowService { const pagedData = flows.slice(afterIndex, afterIndex + first); const edges = await Promise.all( pagedData.map(async (flow) => { - const categories: string[] = await this.getFlowCategories(flow, models); + const categories: FlowCategory[] = await this.getFlowCategories(flow, models); return { node: { id: flow.id.valueOf(), amountUSD: flow.amountUSD.toString(), createdAt: flow.createdAt, - category: categories[0], + categories: categories, }, cursor: flow.id.toString(), }; @@ -58,7 +58,7 @@ export class FlowService { return { edges, pageInfo: { - hasNextPage: count > first, + hasNextPage: count.count > afterIndex, hasPreviousPage: afterIndex > 0, startCursor: pagedData.length ? 
pagedData[0].id.toString() : '', endCursor: pagedData.length @@ -68,20 +68,14 @@ export class FlowService { sortField: sortCondition.column, sortOrder: sortCondition.order, }, - totalCount: count, + totalCount: count.count, }; } - private async getFlowTypeCategory(models: Database): Promise { - return models.category.find({ - where: { group: 'flowType', name: 'Parked' }, - }); - } - private async getFlowCategories( flow: any, models: Database - ): Promise { + ): Promise { const flowIdBranded = createBrandedValue(flow.id); const flowLinks = await models.flowLink.find({ where: { @@ -89,7 +83,6 @@ export class FlowService { }, }); - //const flowTypeCategory = await this.getFlowTypeCategory(models); const flowLinksBrandedIds = flowLinks.map((flowLink) => createBrandedValue(flowLink.parentID) ); @@ -100,7 +93,6 @@ export class FlowService { [Op.IN]: flowLinksBrandedIds, }, versionID: flow.versionID, - categoryID: createBrandedValue(1051), }, }); @@ -112,8 +104,10 @@ export class FlowService { }, }); - return categories.map((cat) => { - return cat.name; - }); + return categories.map((cat) => ({ + id: cat.id, + name: cat.name, + group: cat.group, + })); } } diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index c99642c0..9407db55 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -1,27 +1,30 @@ import { Field, ObjectType } from 'type-graphql'; @ObjectType() -export default class Flow { +export class FlowCategory { @Field({ nullable: false }) id: number; @Field({ nullable: false }) - amountUSD: string; + name: string; @Field({ nullable: false }) - createdAt: Date; - - @Field({ nullable: true }) - category: string; + group: string; } @ObjectType() -export class FlowCategory { +export default class Flow { @Field({ nullable: false }) id: number; @Field({ nullable: false }) - name: string; + amountUSD: string; + + @Field({ nullable: false }) + createdAt: Date; + + @Field(() => [FlowCategory], { nullable: false }) + categories: FlowCategory[]; } @ObjectType() @@ -32,6 +35,7 @@ export class FlowEdge { @Field({ nullable: false }) cursor: string; } + @ObjectType() export class PageInfo { @Field({ nullable: false }) From 98fc683644ad1243238eb72f811329d61511ee6b Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 27 Oct 2023 11:48:58 +0200 Subject: [PATCH 12/67] Add query fetching nested properties --- src/domain-services/flows/flow-service.ts | 148 ++++++++++++++++++++- src/domain-services/flows/graphql/types.ts | 51 +++++++ 2 files changed, 197 insertions(+), 2 deletions(-) diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index a4110c25..6f6d3753 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -1,5 +1,13 @@ import { Service } from 'typedi'; -import { FlowCategory, FlowSearchResult, FlowSortField } from './graphql/types'; +import { + FlowCategory, + FlowLocation, + FlowOrganization, + FlowPlan, + FlowSearchResult, + FlowSortField, + FlowUsageYear, +} from './graphql/types'; import { Database } from '@unocha/hpc-api-core/src/db/type'; import { Brand, createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; @@ -41,7 +49,49 @@ export class FlowService { const pagedData = flows.slice(afterIndex, afterIndex + first); const edges = await Promise.all( pagedData.map(async (flow) => { - const categories: FlowCategory[] = await 
this.getFlowCategories(flow, models); + const flowIdBranded = createBrandedValue(flow.id); + + const categories: FlowCategory[] = await this.getFlowCategories( + flow, + models + ); + + const flowObjects = await models.flowObject.find({ + where: { + flowID: flowIdBranded, + }, + }); + + const organizationsFO: any[] = []; + const locationsFO: any[] = []; + const plansFO: any[] = []; + const usageYearsFO: any[] = []; + + flowObjects.forEach((flowObject) => { + if (flowObject.objectType === 'organization') { + organizationsFO.push(flowObject); + } else if (flowObject.objectType === 'location') { + locationsFO.push(flowObject); + } else if (flowObject.objectType === 'plan') { + plansFO.push(flowObject); + } else if (flowObject.objectType === 'usageYear') { + usageYearsFO.push(flowObject); + } + }); + + const organizations: FlowOrganization[] = await this.getOrganizations( + organizationsFO, + models + ); + + const locations: FlowLocation[] = await this.getLocations( + locationsFO, + models + ); + + const plans = await this.getPlans(plansFO, models); + + const usageYears = await this.getUsageYears(usageYearsFO, models); return { node: { @@ -49,6 +99,10 @@ export class FlowService { amountUSD: flow.amountUSD.toString(), createdAt: flow.createdAt, categories: categories, + organizations: organizations, + locations: locations, + plans: plans, + usageYears: usageYears, }, cursor: flow.id.toString(), }; @@ -110,4 +164,94 @@ export class FlowService { group: cat.group, })); } + + private async getOrganizations( + organizationsFO: any[], + models: Database + ): Promise { + const organizations = await models.organization.find({ + where: { + id: { + [Op.IN]: organizationsFO.map((orgFO) => orgFO.objectID), + }, + }, + }); + + return organizations.map((org) => ({ + id: org.id, + refDirection: organizationsFO.find((orgFO) => orgFO.objectID === org.id) + .refDirection, + name: org.name, + })); + } + + private async getLocations( + locationsFO: any[], + models: Database + ): Promise { + const locations = await models.location.find({ + where: { + id: { + [Op.IN]: locationsFO.map((locFO) => locFO.objectID), + }, + }, + }); + + return locations.map((loc) => ({ + id: loc.id.valueOf(), + name: loc.name!, + })); + } + + private async getPlans( + plansFO: any[], + models: Database + ): Promise { + const plans = await models.plan.find({ + where: { + id: { + [Op.IN]: plansFO.map((planFO) => planFO.objectID), + }, + }, + }); + + const flowPlans: FlowPlan[] = []; + + for (const plan of plans) { + const planVersion = await models.planVersion.find({ + where: { + planId: plan.id, + currentVersion: true, + }, + }); + + flowPlans.push({ + id: plan.id.valueOf(), + name: planVersion[0].name, + }); + } + + return flowPlans; + } + + private async getUsageYears( + usageYearsFO: any[], + models: Database + ): Promise { + const usageYears = await models.usageYear.find({ + where: { + id: { + [Op.IN]: usageYearsFO.map((usageYearFO) => usageYearFO.objectID), + }, + }, + }); + + return usageYears.map((usageYear) => ({ + year: usageYear.year, + direction: usageYearsFO.find( + (usageYearFO) => usageYearFO.objectID === usageYear.id + ).refDirection, + })); + } + } diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 9407db55..f8864e51 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -12,6 +12,45 @@ export class FlowCategory { group: string; } +@ObjectType() +export class FlowOrganization { + @Field({ nullable: false }) + id: 
number; + + @Field({ nullable: false }) + refDirection: string; + + @Field({ nullable: false }) + name: string; +} + +@ObjectType() +export class FlowLocation { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + name: string; +} + +@ObjectType() +export class FlowPlan { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + name: string; +} + +@ObjectType() +export class FlowUsageYear { + @Field({ nullable: false }) + year: string; + + @Field({ nullable: false }) + direction: string; +} + @ObjectType() export default class Flow { @Field({ nullable: false }) @@ -25,6 +64,18 @@ export default class Flow { @Field(() => [FlowCategory], { nullable: false }) categories: FlowCategory[]; + + @Field(() => [FlowOrganization], { nullable: false }) + organizations: FlowOrganization[]; + + @Field(() => [FlowLocation], { nullable: false }) + locations: FlowLocation[]; + + @Field(() => [FlowPlan], { nullable: false }) + plans: FlowPlan[]; + + @Field(() => [FlowUsageYear], { nullable: false }) + usageYears: FlowUsageYear[]; } @ObjectType() From 9495ef1d556f8f7f6b6d249188a9f99252c8e5e0 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 27 Oct 2023 12:22:57 +0200 Subject: [PATCH 13/67] Refactor move methods to individual services --- src/domain-services/flows/flow-service.ts | 117 +++--------------- .../location/location-service.ts | 20 +++ .../organizations/organization-service.ts | 24 ++++ src/domain-services/plans/plan-service.ts | 33 +++++ .../usage-years/usage-year-service.ts | 27 ++++ 5 files changed, 124 insertions(+), 97 deletions(-) create mode 100644 src/domain-services/organizations/organization-service.ts create mode 100644 src/domain-services/usage-years/usage-year-service.ts diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 6f6d3753..0a63abe2 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -9,12 +9,20 @@ import { FlowUsageYear, } from './graphql/types'; import { Database } from '@unocha/hpc-api-core/src/db/type'; -import { Brand, createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { OrganizationService } from '../organizations/organization-service'; +import { LocationService } from '../location/location-service'; +import { PlanService } from '../plans/plan-service'; +import { UsageYearService } from '../usage-years/usage-year-service'; @Service() export class FlowService { + constructor(private readonly organizationService: OrganizationService, + private readonly locationService: LocationService, + private readonly planService: PlanService, + private readonly usageYearService: UsageYearService) {} + async search( models: Database, first: number, @@ -49,18 +57,12 @@ export class FlowService { const pagedData = flows.slice(afterIndex, afterIndex + first); const edges = await Promise.all( pagedData.map(async (flow) => { - const flowIdBranded = createBrandedValue(flow.id); - const categories: FlowCategory[] = await this.getFlowCategories( flow, models ); - const flowObjects = await models.flowObject.find({ - where: { - flowID: flowIdBranded, - }, - }); + const flowObjects = await this.getFlowObjects(flow, models); const organizationsFO: any[] = []; const locationsFO: any[] = []; @@ -79,19 +81,19 @@ export class 
FlowService { } }); - const organizations: FlowOrganization[] = await this.getOrganizations( + const organizations: FlowOrganization[] = await this.organizationService.getFlowObjectOrganizations( organizationsFO, models ); - const locations: FlowLocation[] = await this.getLocations( + const locations: FlowLocation[] = await this.locationService.getFlowObjectLocations( locationsFO, models ); - const plans = await this.getPlans(plansFO, models); + const plans = await this.planService.getFlowObjectPlans(plansFO, models); - const usageYears = await this.getUsageYears(usageYearsFO, models); + const usageYears = await this.usageYearService.getFlowObjectUsageYears(usageYearsFO, models); return { node: { @@ -165,93 +167,14 @@ export class FlowService { })); } - private async getOrganizations( - organizationsFO: any[], - models: Database - ): Promise { - const organizations = await models.organization.find({ - where: { - id: { - [Op.IN]: organizationsFO.map((orgFO) => orgFO.objectID), - }, - }, - }); - - return organizations.map((org) => ({ - id: org.id, - refDirection: organizationsFO.find((orgFO) => orgFO.objectID === org.id) - .refDirection, - name: org.name, - })); - } - - private async getLocations( - locationsFO: any[], - models: Database - ): Promise { - const locations = await models.location.find({ - where: { - id: { - [Op.IN]: locationsFO.map((locFO) => locFO.objectID), - }, - }, - }); - - return locations.map((loc) => ({ - id: loc.id.valueOf(), - name: loc.name!, - })); - } - - private async getPlans( - plansFO: any[], - models: Database - ): Promise { - const plans = await models.plan.find({ - where: { - id: { - [Op.IN]: plansFO.map((planFO) => planFO.objectID), - }, - }, - }); - - const flowPlans: FlowPlan[] = []; - - for (const plan of plans) { - const planVersion = await models.planVersion.find({ - where: { - planId: plan.id, - currentVersion: true, - }, - }); - - flowPlans.push({ - id: plan.id.valueOf(), - name: planVersion[0].name, - }); - } - - return flowPlans; - } - - private async getUsageYears( - usageYearsFO: any[], - models: Database - ): Promise { - const usageYears = await models.usageYear.find({ + private async getFlowObjects(flow: any, models: Database): Promise { + const flowIdBranded = createBrandedValue(flow.id); + const flowObjects = await models.flowObject.find({ where: { - id: { - [Op.IN]: usageYearsFO.map((usageYearFO) => usageYearFO.objectID), - }, + flowID: flowIdBranded, }, }); - return usageYears.map((usageYear) => ({ - year: usageYear.year, - direction: usageYearsFO.find( - (usageYearFO) => usageYearFO.objectID === usageYear.id - ).refDirection, - })); + return flowObjects; } - } diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index 67d78892..678aa102 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -2,6 +2,8 @@ import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; +import { FlowLocation } from '../flows/graphql/types'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; @Service() export class LocationService { @@ -26,4 +28,22 @@ export class LocationService { where: { name: { [models.Op.ILIKE]: `%${name}%` } }, }); } + + async getFlowObjectLocations( + locationsFO: any[], + models: Database + ): Promise { 
+ const locations = await models.location.find({ + where: { + id: { + [Op.IN]: locationsFO.map((locFO) => locFO.objectID), + }, + }, + }); + + return locations.map((loc) => ({ + id: loc.id.valueOf(), + name: loc.name!, + })); + } } diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts new file mode 100644 index 00000000..36e5a525 --- /dev/null +++ b/src/domain-services/organizations/organization-service.ts @@ -0,0 +1,24 @@ +import { Database } from "@unocha/hpc-api-core/src/db"; +import { Service } from "typedi"; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; + +@Service() +export class OrganizationService { + + async getFlowObjectOrganizations(organizationsFO: any[], models: Database){ + const organizations = await models.organization.find({ + where: { + id: { + [Op.IN]: organizationsFO.map((orgFO) => orgFO.objectID), + }, + }, + }); + + return organizations.map((org) => ({ + id: org.id, + refDirection: organizationsFO.find((orgFO) => orgFO.objectID === org.id) + .refDirection, + name: org.name, + })); + } +} \ No newline at end of file diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index f7321b41..bea906f5 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -1,8 +1,10 @@ import { type PlanId } from '@unocha/hpc-api-core/src/db/models/plan'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { NotFoundError } from '@unocha/hpc-api-core/src/util/error'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; +import { FlowPlan } from '../flows/graphql/types'; @Service() export class PlanService { @@ -44,4 +46,35 @@ export class PlanService { return years.map((y) => y.year); } + + async getFlowObjectPlans( + plansFO: any[], + models: Database + ): Promise { + const plans = await models.plan.find({ + where: { + id: { + [Op.IN]: plansFO.map((planFO) => planFO.objectID), + }, + }, + }); + + const flowPlans: FlowPlan[] = []; + + for (const plan of plans) { + const planVersion = await models.planVersion.find({ + where: { + planId: plan.id, + currentVersion: true, + }, + }); + + flowPlans.push({ + id: plan.id.valueOf(), + name: planVersion[0].name, + }); + } + + return flowPlans; + } } diff --git a/src/domain-services/usage-years/usage-year-service.ts b/src/domain-services/usage-years/usage-year-service.ts new file mode 100644 index 00000000..7712f235 --- /dev/null +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -0,0 +1,27 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Service } from 'typedi'; +import { FlowUsageYear } from '../flows/graphql/types'; + +@Service() +export class UsageYearService { + async getFlowObjectUsageYears( + usageYearsFO: any[], + models: Database + ): Promise { + const usageYears = await models.usageYear.find({ + where: { + id: { + [Op.IN]: usageYearsFO.map((usageYearFO) => usageYearFO.objectID), + }, + }, + }); + + return usageYears.map((usageYear) => ({ + year: usageYear.year, + direction: usageYearsFO.find( + (usageYearFO) => usageYearFO.objectID === usageYear.id + ).refDirection, + })); + } +} From 9d8ac3413efbdf251979c63367bc97efe5b2ff22 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 30 Oct 2023 09:46:17 +0100 Subject: 
[PATCH 14/67] Split individual services Minor refactor --- .../categories/category-service.ts | 44 +++++ .../flows/flow-search-service.ts | 171 +++++++++++++++++ src/domain-services/flows/flow-service.ts | 180 ------------------ src/domain-services/flows/graphql/resolver.ts | 14 +- src/domain-services/flows/graphql/types.ts | 66 +------ .../organizations/organization-service.ts | 37 ++-- 6 files changed, 247 insertions(+), 265 deletions(-) create mode 100644 src/domain-services/categories/category-service.ts create mode 100644 src/domain-services/flows/flow-search-service.ts delete mode 100644 src/domain-services/flows/flow-service.ts diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts new file mode 100644 index 00000000..76f7307d --- /dev/null +++ b/src/domain-services/categories/category-service.ts @@ -0,0 +1,44 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { Service } from 'typedi'; +import { FlowCategory } from '../flows/graphql/types'; + +@Service() +export class CategoryService { + async getFlowCategory(flow: any, models: Database): Promise { + const flowIdBranded = createBrandedValue(flow.id); + const flowLinks = await models.flowLink.find({ + where: { + childID: flowIdBranded, + }, + }); + + const flowLinksBrandedIds = flowLinks.map((flowLink) => + createBrandedValue(flowLink.parentID) + ); + + const categoriesRef = await models.categoryRef.find({ + where: { + objectID: { + [Op.IN]: flowLinksBrandedIds, + }, + versionID: flow.versionID, + }, + }); + + const categories = await models.category.find({ + where: { + id: { + [Op.IN]: categoriesRef.map((catRef) => catRef.categoryID), + }, + }, + }); + + return categories.map((cat) => ({ + id: cat.id, + name: cat.name, + group: cat.group, + })); + } +} diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts new file mode 100644 index 00000000..c48d3ade --- /dev/null +++ b/src/domain-services/flows/flow-search-service.ts @@ -0,0 +1,171 @@ +import { Service } from 'typedi'; +import { + FlowCategory, + FlowLocation, + FlowOrganization, + FlowPlan, + FlowSearchResult, + FlowSortField, + FlowUsageYear, +} from './graphql/types'; +import { Database } from '@unocha/hpc-api-core/src/db/type'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { OrganizationService } from '../organizations/organization-service'; +import { LocationService } from '../location/location-service'; +import { PlanService } from '../plans/plan-service'; +import { UsageYearService } from '../usage-years/usage-year-service'; +import { CategoryService } from '../categories/category-service'; +import { prepareConditionFromCursor } from '../../utils/graphql/pagination'; + +@Service() +export class FlowSearchService { + constructor( + private readonly organizationService: OrganizationService, + private readonly locationService: LocationService, + private readonly planService: PlanService, + private readonly usageYearService: UsageYearService, + private readonly categoryService: CategoryService + ) {} + + async search( + models: Database, + first: number, + afterCursor?: number, + beforeCursor?: number, + sortField?: FlowSortField, + sortOrder?: 'asc' | 'desc' + ): Promise { + if (beforeCursor && afterCursor) { + throw new Error('Cannot use before and after cursor at 
the same time'); + } + + const sortCondition = { + column: sortField ?? 'id', + order: sortOrder ?? 'desc', + }; + + let flows; + const countRes = await models.flow.count(); + const count = countRes[0] as { count: number }; + + const hasCursor = afterCursor || beforeCursor; + + if (hasCursor) { + const condition = prepareConditionFromCursor( + sortCondition, + afterCursor, + beforeCursor + ); + + flows = await models.flow.find({ + orderBy: sortCondition, + limit: first, + where: { + ...condition, + }, + }); + } else { + flows = await models.flow.find({ + orderBy: sortCondition, + limit: first, + }); + } + + const items = await Promise.all( + flows.map(async (flow) => { + const categories: FlowCategory[] = + await this.categoryService.getFlowCategory(flow, models); + + const organizationsFO: any[] = []; + const locationsFO: any[] = []; + const plansFO: any[] = []; + const usageYearsFO: any[] = []; + + await this.getFlowObjects( + flow, + models, + organizationsFO, + locationsFO, + plansFO, + usageYearsFO + ); + + const organizationsPromise: Promise = + this.organizationService.getFlowObjectOrganizations( + organizationsFO, + models + ); + + const locationsPromise: Promise = + this.locationService.getFlowObjectLocations(locationsFO, models); + + const plansPromise: Promise = + this.planService.getFlowObjectPlans(plansFO, models); + + const usageYearsPromise: Promise = + this.usageYearService.getFlowObjectUsageYears(usageYearsFO, models); + + const [organizations, locations, plans, usageYears] = await Promise.all( + [ + organizationsPromise, + locationsPromise, + plansPromise, + usageYearsPromise, + ] + ); + + return { + id: flow.id.valueOf(), + amountUSD: flow.amountUSD.toString(), + createdAt: flow.createdAt, + categories: categories, + organizations: organizations, + locations: locations, + plans: plans, + usageYears: usageYears, + cursor: flow.id.valueOf(), + }; + }) + ); + + return { + items, + hasNextPage: first <= flows.length, + hasPreviousPage: afterCursor !== undefined, + startCursor: flows.length ? flows[0].id.valueOf() : 0, + endCursor: flows.length ? 
flows[flows.length - 1].id.valueOf() : 0, + pageSize: flows.length, + sortField: sortCondition.column, + sortOrder: sortCondition.order, + total: count.count, + }; + } + + private async getFlowObjects( + flow: any, + models: Database, + organizationsFO: any[], + locationsFO: any[], + plansFO: any[], + usageYearsFO: any[] + ): Promise { + const flowIdBranded = createBrandedValue(flow.id); + const flowObjects = await models.flowObject.find({ + where: { + flowID: flowIdBranded, + }, + }); + + flowObjects.forEach((flowObject) => { + if (flowObject.objectType === 'organization') { + organizationsFO.push(flowObject); + } else if (flowObject.objectType === 'location') { + locationsFO.push(flowObject); + } else if (flowObject.objectType === 'plan') { + plansFO.push(flowObject); + } else if (flowObject.objectType === 'usageYear') { + usageYearsFO.push(flowObject); + } + }); + } +} diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts deleted file mode 100644 index 0a63abe2..00000000 --- a/src/domain-services/flows/flow-service.ts +++ /dev/null @@ -1,180 +0,0 @@ -import { Service } from 'typedi'; -import { - FlowCategory, - FlowLocation, - FlowOrganization, - FlowPlan, - FlowSearchResult, - FlowSortField, - FlowUsageYear, -} from './graphql/types'; -import { Database } from '@unocha/hpc-api-core/src/db/type'; -import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { OrganizationService } from '../organizations/organization-service'; -import { LocationService } from '../location/location-service'; -import { PlanService } from '../plans/plan-service'; -import { UsageYearService } from '../usage-years/usage-year-service'; - -@Service() -export class FlowService { - constructor(private readonly organizationService: OrganizationService, - private readonly locationService: LocationService, - private readonly planService: PlanService, - private readonly usageYearService: UsageYearService) {} - - async search( - models: Database, - first: number, - afterCursor: string, - sortField: FlowSortField, - sortOrder: 'asc' | 'desc' - ): Promise { - let afterIndex = 0; - - const sortCondition = { - column: sortField ?? 'id', - order: sortOrder ?? 
'DESC', - }; - - let flows = await models.flow.find({ - orderBy: sortCondition, - limit: first, - }); - const countRes = await models.flow.count(); - const count = countRes[0] as { count: number }; - - if (afterCursor) { - const after = flows.findIndex( - (flow) => flow.id.toString() === afterCursor - ); - if (after < 0) { - throw new Error('Cursor not found'); - } - afterIndex = after + 1; - } - - const pagedData = flows.slice(afterIndex, afterIndex + first); - const edges = await Promise.all( - pagedData.map(async (flow) => { - const categories: FlowCategory[] = await this.getFlowCategories( - flow, - models - ); - - const flowObjects = await this.getFlowObjects(flow, models); - - const organizationsFO: any[] = []; - const locationsFO: any[] = []; - const plansFO: any[] = []; - const usageYearsFO: any[] = []; - - flowObjects.forEach((flowObject) => { - if (flowObject.objectType === 'organization') { - organizationsFO.push(flowObject); - } else if (flowObject.objectType === 'location') { - locationsFO.push(flowObject); - } else if (flowObject.objectType === 'plan') { - plansFO.push(flowObject); - } else if (flowObject.objectType === 'usageYear') { - usageYearsFO.push(flowObject); - } - }); - - const organizations: FlowOrganization[] = await this.organizationService.getFlowObjectOrganizations( - organizationsFO, - models - ); - - const locations: FlowLocation[] = await this.locationService.getFlowObjectLocations( - locationsFO, - models - ); - - const plans = await this.planService.getFlowObjectPlans(plansFO, models); - - const usageYears = await this.usageYearService.getFlowObjectUsageYears(usageYearsFO, models); - - return { - node: { - id: flow.id.valueOf(), - amountUSD: flow.amountUSD.toString(), - createdAt: flow.createdAt, - categories: categories, - organizations: organizations, - locations: locations, - plans: plans, - usageYears: usageYears, - }, - cursor: flow.id.toString(), - }; - }) - ); - - return { - edges, - pageInfo: { - hasNextPage: count.count > afterIndex, - hasPreviousPage: afterIndex > 0, - startCursor: pagedData.length ? pagedData[0].id.toString() : '', - endCursor: pagedData.length - ? 
pagedData[pagedData.length - 1].id.toString() - : '', - pageSize: pagedData.length, - sortField: sortCondition.column, - sortOrder: sortCondition.order, - }, - totalCount: count.count, - }; - } - - private async getFlowCategories( - flow: any, - models: Database - ): Promise { - const flowIdBranded = createBrandedValue(flow.id); - const flowLinks = await models.flowLink.find({ - where: { - childID: flowIdBranded, - }, - }); - - const flowLinksBrandedIds = flowLinks.map((flowLink) => - createBrandedValue(flowLink.parentID) - ); - - const categoriesRef = await models.categoryRef.find({ - where: { - objectID: { - [Op.IN]: flowLinksBrandedIds, - }, - versionID: flow.versionID, - }, - }); - - const categories = await models.category.find({ - where: { - id: { - [Op.IN]: categoriesRef.map((catRef) => catRef.categoryID), - }, - }, - }); - - return categories.map((cat) => ({ - id: cat.id, - name: cat.name, - group: cat.group, - })); - } - - private async getFlowObjects(flow: any, models: Database): Promise { - const flowIdBranded = createBrandedValue(flow.id); - const flowObjects = await models.flowObject.find({ - where: { - flowID: flowIdBranded, - }, - }); - - return flowObjects; - } -} diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index baebb962..b78c4393 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -1,19 +1,20 @@ -import Flow, { FlowSearchResult, FlowSortField } from './types'; +import Flow, { FlowSearchResult } from './types'; import { Service } from 'typedi'; -import { Arg, Args, Ctx, Query, Resolver } from 'type-graphql'; -import { FlowService } from '../flow-service'; +import { Arg, Ctx, Query, Resolver } from 'type-graphql'; +import { FlowSearchService } from '../flow-search-service'; import Context from '../../Context'; @Service() @Resolver(Flow) export default class FlowResolver { - constructor(private flowService: FlowService) {} + constructor(private flowSearchService: FlowSearchService) {} @Query(() => FlowSearchResult) async searchFlows( @Ctx() context: Context, @Arg('first', { nullable: false }) first: number, - @Arg('afterCursor', { nullable: true }) afterCursor: string, + @Arg('afterCursor', { nullable: true }) afterCursor: number, + @Arg('beforeCursor', { nullable: true }) beforeCursor: number, @Arg('sortField', { nullable: true }) sortField: | 'id' @@ -38,10 +39,11 @@ export default class FlowResolver { | 'deletedAt', @Arg('sortOrder', { nullable: true }) sortOrder: 'asc' | 'desc' ): Promise { - return await this.flowService.search( + return await this.flowSearchService.search( context.models, first, afterCursor, + beforeCursor, sortField, sortOrder ); diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index f8864e51..5efe1be2 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -1,4 +1,5 @@ import { Field, ObjectType } from 'type-graphql'; +import { ItemPaged, PageInfo } from '../../../utils/graphql/pagination'; @ObjectType() export class FlowCategory { @@ -52,7 +53,7 @@ export class FlowUsageYear { } @ObjectType() -export default class Flow { +export default class Flow implements ItemPaged { @Field({ nullable: false }) id: number; @@ -76,70 +77,15 @@ export default class Flow { @Field(() => [FlowUsageYear], { nullable: false }) usageYears: FlowUsageYear[]; -} - -@ObjectType() -export class FlowEdge { - @Field({ nullable: false }) - node: Flow; 
@Field({ nullable: false }) - cursor: string; + cursor: number; } @ObjectType() -export class PageInfo { - @Field({ nullable: false }) - hasNextPage: boolean; - - @Field({ nullable: false }) - hasPreviousPage: boolean; - - @Field({ nullable: false }) - startCursor: string; - - @Field({ nullable: false }) - endCursor: string; - - @Field({ nullable: false }) - pageSize: number; - - @Field({ nullable: false }) - sortField: - | 'id' - | 'amountUSD' - | 'versionID' - | 'activeStatus' - | 'restricted' - | 'newMoney' - | 'flowDate' - | 'decisionDate' - | 'firstReportedDate' - | 'budgetYear' - | 'origAmount' - | 'origCurrency' - | 'exchangeRate' - | 'description' - | 'notes' - | 'versionStartDate' - | 'versionEndDate' - | 'createdAt' - | 'updatedAt' - | 'deletedAt'; - - @Field({ nullable: false }) - sortOrder: string; -} -@ObjectType() -export class FlowSearchResult { - @Field(() => [FlowEdge], { nullable: false }) - edges: FlowEdge[]; - - @Field(() => PageInfo, { nullable: false }) - pageInfo: PageInfo; - - @Field({ nullable: false }) - totalCount: number; +export class FlowSearchResult extends PageInfo { + @Field(() => [Flow], { nullable: false }) + items: Flow[]; } export type FlowSortField = diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index 36e5a525..66805bb8 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -1,24 +1,23 @@ -import { Database } from "@unocha/hpc-api-core/src/db"; -import { Service } from "typedi"; +import { Database } from '@unocha/hpc-api-core/src/db'; +import { Service } from 'typedi'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; @Service() export class OrganizationService { + async getFlowObjectOrganizations(organizationsFO: any[], models: Database) { + const organizations = await models.organization.find({ + where: { + id: { + [Op.IN]: organizationsFO.map((orgFO) => orgFO.objectID), + }, + }, + }); - async getFlowObjectOrganizations(organizationsFO: any[], models: Database){ - const organizations = await models.organization.find({ - where: { - id: { - [Op.IN]: organizationsFO.map((orgFO) => orgFO.objectID), - }, - }, - }); - - return organizations.map((org) => ({ - id: org.id, - refDirection: organizationsFO.find((orgFO) => orgFO.objectID === org.id) - .refDirection, - name: org.name, - })); - } -} \ No newline at end of file + return organizations.map((org) => ({ + id: org.id, + refDirection: organizationsFO.find((orgFO) => orgFO.objectID === org.id) + .refDirection, + name: org.name, + })); + } +} From b870b5695a92777a65fbda3026f50b55a9995071 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 30 Oct 2023 10:39:21 +0100 Subject: [PATCH 15/67] Add tests for flow resolver --- tests/resolvers/flows.spec.ts | 293 ++++++++++++++++++++++++++++++++-- 1 file changed, 277 insertions(+), 16 deletions(-) diff --git a/tests/resolvers/flows.spec.ts b/tests/resolvers/flows.spec.ts index 53f9957d..364ed210 100644 --- a/tests/resolvers/flows.spec.ts +++ b/tests/resolvers/flows.spec.ts @@ -1,22 +1,283 @@ import ContextProvider from '../testContext'; +const fullQuery = `query { + searchFlows(first: 10, sortOrder: "DESC", sortField: "id") { + total + + items { + id + + createdAt + + amountUSD + + categories { + name + + group + } + + organizations { + refDirection + name + } + + locations { + name + } + + plans { + name + } + + usageYears { + year + direction + } + + cursor + } + + startCursor + + 
hasNextPage
+
+    endCursor
+
+    hasPreviousPage
+
+    pageSize
+  }
+}`;
+
+const simplifiedQuery = `query {
+  searchFlows(
+    first: 10
+    sortOrder: "DESC"
+    sortField: "id"
+  ) {
+    total
+
+    items {
+      id
+
+      createdAt
+
+      amountUSD
+
+      cursor
+    }
+
+    startCursor
+
+    hasNextPage
+
+    endCursor
+
+    hasPreviousPage
+
+    pageSize
+  }
+}`;
 describe('Query should return Flow search', () => {
-
-    it('All data should be returned', async () => {
-        const response = await ContextProvider.Instance.apolloTestServer.executeOperation({
-            query: 'query { searchFlows (first:10) { totalCount edges { node { id createdAt amountUSD category } cursor } pageInfo { startCursor hasNextPage endCursor hasPreviousPage pageSize } } }',
-
-        });
-
-        expect(response).toBeDefined();
-        expect(response.errors).toBeUndefined();
-        expect(response.data).toBeDefined();
-        const data = response.data as any;
-        expect(data.flows).toBeDefined();
-        expect(data.flows.length).toBeGreaterThan(0);
-        const flows = data.flows[0];
-        expect(flows.id).toBeDefined();
+  it('All data should be returned', async () => {
+    const response =
+      await ContextProvider.Instance.apolloTestServer.executeOperation({
+        query: fullQuery,
      });

    expect(response).toBeDefined();
    expect(response.errors).toBeUndefined();
    expect(response.data).toBeDefined();
+
+    const data = response.data as any;
+    expect(data.searchFlows).toBeDefined();
+
+    const searchFlowsResponse = data.searchFlows;
+    expect(searchFlowsResponse.pageSize).toBe(10);
+    expect(searchFlowsResponse.hasPreviousPage).toBe(false);
+    expect(searchFlowsResponse.hasNextPage).toBe(true);
+    expect(searchFlowsResponse.endCursor).toBeDefined();
+    expect(searchFlowsResponse.startCursor).toBeDefined();
+    expect(searchFlowsResponse.total).toBeDefined();
+    expect(searchFlowsResponse.items).toBeDefined();
+
+    const flows = searchFlowsResponse.items;
+    expect(flows.length).toBe(10);
+
+    const flow = flows[0];
+    expect(flow.id).toBeDefined();
+    expect(flow.cursor).toBeDefined();
+    expect(flow.createdAt).toBeDefined();
+    expect(flow.amountUSD).toBeDefined();
+    expect(flow.categories).toBeDefined();
+    expect(flow.categories.length).toBeGreaterThan(0);
+    expect(flow.organizations).toBeDefined();
+    expect(flow.organizations.length).toBeGreaterThan(0);
+    expect(flow.locations).toBeDefined();
+    expect(flow.locations.length).toBeGreaterThan(0);
+    expect(flow.plans).toBeDefined();
+    expect(flow.plans.length).toBeGreaterThan(0);
+    expect(flow.usageYears).toBeDefined();
+    expect(flow.usageYears.length).toBeGreaterThan(0);
+  });
+
+  it('Only requested data should be returned', async () => {
+    const response =
+      await ContextProvider.Instance.apolloTestServer.executeOperation({
+        query: simplifiedQuery,
+      });
+
+    expect(response).toBeDefined();
+    expect(response.errors).toBeUndefined();
+    expect(response.data).toBeDefined();
+
+    const data = response.data as any;
+    expect(data.searchFlows).toBeDefined();
+
+    const searchFlowsResponse = data.searchFlows;
+    expect(searchFlowsResponse.pageSize).toBe(10);
+    expect(searchFlowsResponse.hasPreviousPage).toBe(false);
+    expect(searchFlowsResponse.hasNextPage).toBe(true);
+    expect(searchFlowsResponse.endCursor).toBeDefined();
+    expect(searchFlowsResponse.startCursor).toBeDefined();
+    expect(searchFlowsResponse.total).toBeDefined();
+    expect(searchFlowsResponse.items).toBeDefined();
+
+    const flows = searchFlowsResponse.items;
+    expect(flows.length).toBe(10);
+
+    const flow = flows[0];
+    expect(flow.id).toBeDefined();
+    expect(flow.cursor).toBeDefined();
+    expect(flow.createdAt).toBeDefined();
+    expect(flow.amountUSD).toBeDefined();
+
+    expect(flow.categories).toBeUndefined();
+    expect(flow.organizations).toBeUndefined();
+    expect(flow.locations).toBeUndefined();
+    expect(flow.plans).toBeUndefined();
+    expect(flow.usageYears).toBeUndefined();
+  });
+
+  it('Should return error when invalid sort field', async () => {
+    const response =
+      await ContextProvider.Instance.apolloTestServer.executeOperation({
+        query: `query {
+          searchFlows(
+            first: 10
+            sortOrder: "DESC"
+            sortField: "invalid"
+          ) {
+            total
+
+            items {
+              id
+
+              createdAt
+
+              amountUSD
+
+              cursor
+            }
+          }
+        }`,
+      });
+
+    expect(response).toBeDefined();
+    expect(response.errors).toBeDefined();
+    expect(response.data).toBeNull();
+  });
+
+  it('Should return error when invalid afterCursor', async () => {
+    const response =
+      await ContextProvider.Instance.apolloTestServer.executeOperation({
+        query: `query {
+          searchFlows(
+            first: 10
+            sortOrder: "DESC"
+            sortField: "id"
+            afterCursor: "invalid"
+          ) {
+            total
+
+            items {
+              id
+
+              createdAt
+
+              amountUSD
+
+              cursor
+            }
+          }
+        }`,
+      });
+
+    expect(response).toBeDefined();
+    expect(response.errors).toBeDefined();
+    expect(response.data).toBeUndefined();
+  });
+
+  it('Should return error when invalid beforeCursor', async () => {
+    const response =
+      await ContextProvider.Instance.apolloTestServer.executeOperation({
+        query: `query {
+          searchFlows(
+            first: 10
+            sortOrder: "DESC"
+            sortField: "id"
+            beforeCursor: "invalid"
+          ) {
+            total
+
+            items {
+              id
+
+              createdAt
+
+              amountUSD
+
+              cursor
+            }
+          }
+        }`,
+      });
+
+    expect(response).toBeDefined();
+    expect(response.errors).toBeDefined();
+    expect(response.data).toBeUndefined();
+  });
+
+  it('Should return error when both afterCursor and beforeCursor are provided', async () => {
+    const response =
+      await ContextProvider.Instance.apolloTestServer.executeOperation({
+        query: `query {
+          searchFlows(
+            first: 10
+            sortOrder: "DESC"
+            sortField: "id"
+            afterCursor: "20"
+            beforeCursor: "40"
+          ) {
+            total
+
+            items {
+              id
+
+              createdAt
+
+              amountUSD
+
+              cursor
+            }
+          }
+        }`,
+      });
+
+    expect(response).toBeDefined();
+    expect(response.errors).toBeDefined();
+    expect(response.data).toBeUndefined();
+  });
+});

From 596245f13b4dbfefa7c03ee9f691bfaaa6989cb4 Mon Sep 17 00:00:00 2001
From: manelcecs
Date: Mon, 30 Oct 2023 10:40:08 +0100
Subject: [PATCH 16/67] Add Pagination utils with tests

---
 .../flows/flow-search-service.ts            |  2 +-
 src/domain-services/flows/graphql/types.ts  |  2 +-
 src/utils/graphql/pagination.ts             | 64 +++++++++++++++++
 tests/unit/pagination.spec.ts               | 72 +++++++++++++++++++
 4 files changed, 138 insertions(+), 2 deletions(-)
 create mode 100644 src/utils/graphql/pagination.ts
 create mode 100644 tests/unit/pagination.spec.ts

diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts
index c48d3ade..956048bf 100644
--- a/src/domain-services/flows/flow-search-service.ts
+++ b/src/domain-services/flows/flow-search-service.ts
@@ -129,7 +129,7 @@ export class FlowSearchService {
     );
 
     return {
-      items,
+      flows: items,
      hasNextPage: first <= flows.length,
      hasPreviousPage: afterCursor !== undefined,
      startCursor: flows.length ?
flows[0].id.valueOf() : 0, diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 5efe1be2..5a2c89c3 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -85,7 +85,7 @@ export default class Flow implements ItemPaged { @ObjectType() export class FlowSearchResult extends PageInfo { @Field(() => [Flow], { nullable: false }) - items: Flow[]; + flows: Flow[]; } export type FlowSortField = diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts new file mode 100644 index 00000000..bdc8008c --- /dev/null +++ b/src/utils/graphql/pagination.ts @@ -0,0 +1,64 @@ +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { ObjectType, Field } from 'type-graphql'; + +export interface ItemPaged { + cursor: number; +} + +@ObjectType() +export class PageInfo { + @Field({ nullable: false }) + hasNextPage: boolean; + + @Field({ nullable: false }) + hasPreviousPage: boolean; + + @Field({ nullable: false }) + startCursor: number; + + @Field({ nullable: false }) + endCursor: number; + + @Field({ nullable: false }) + pageSize: number; + + @Field(() => String, { nullable: false }) + sortField: TSortFields; + + @Field({ nullable: false }) + sortOrder: string; + + @Field({ nullable: false }) + total: number; +} + +export function prepareConditionFromCursor( + sortCondition: { column: string; order: 'asc' | 'desc' }, + afterCursor?: number, + beforeCursor?: number +): any { + if (afterCursor && beforeCursor) { + throw new Error('Cannot use before and after cursor at the same time'); + } + + if (afterCursor || beforeCursor) { + const isAscending = sortCondition.order === 'asc'; + const cursorValue = afterCursor || beforeCursor; + + let op; + if (isAscending) { + op = afterCursor ? Op.GT : Op.LT; + } else { + op = beforeCursor ? 
Op.GT : Op.LT; + } + + return { + id: { + [op]: createBrandedValue(cursorValue), + }, + }; + } + + return {}; +} diff --git a/tests/unit/pagination.spec.ts b/tests/unit/pagination.spec.ts new file mode 100644 index 00000000..8d889a08 --- /dev/null +++ b/tests/unit/pagination.spec.ts @@ -0,0 +1,72 @@ +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { prepareConditionFromCursor } from '../../src/utils/graphql/pagination'; + +describe('Based on cursor and order for pagination', () => { + describe('Order is asc', () => { + const sortCondition = { column: 'id', order: 'asc' as const }; + + it("Should return 'GT' when afterCursor is defined", () => { + const afterCursor = 1; + const beforeCursor = undefined; + const result = prepareConditionFromCursor( + sortCondition, + afterCursor, + beforeCursor + ); + expect(result.id).toEqual({ [Op.GT]: afterCursor }); + }); + + it("Should return 'LT' when beforeCursor is defined", () => { + const afterCursor = undefined; + const beforeCursor = 1; + const result = prepareConditionFromCursor( + sortCondition, + afterCursor, + beforeCursor + ); + expect(result.id).toEqual({ [Op.LT]: beforeCursor }); + }); + + it('Should throw an error when both afterCursor and beforeCursor are defined', () => { + const afterCursor = 1; + const beforeCursor = 2; + expect(() => + prepareConditionFromCursor(sortCondition, afterCursor, beforeCursor) + ).toThrowError('Cannot use before and after cursor at the same time'); + }); + }); + + describe("Order is 'desc'", () => { + const sortCondition = { column: 'id', order: 'desc' as const }; + + it("Should return 'LT' when afterCursor is defined", () => { + const afterCursor = 1; + const beforeCursor = undefined; + const result = prepareConditionFromCursor( + sortCondition, + afterCursor, + beforeCursor + ); + expect(result.id).toEqual({ [Op.LT]: afterCursor }); + }); + + it("Should return 'GT' when beforeCursor is defined", () => { + const afterCursor = undefined; + const beforeCursor = 1; + const result = prepareConditionFromCursor( + sortCondition, + afterCursor, + beforeCursor + ); + expect(result.id).toEqual({ [Op.GT]: beforeCursor }); + }); + + it('Should throw an error when both afterCursor and beforeCursor are defined', () => { + const afterCursor = 1; + const beforeCursor = 2; + expect(() => + prepareConditionFromCursor(sortCondition, afterCursor, beforeCursor) + ).toThrowError('Cannot use before and after cursor at the same time'); + }); + }); +}); From e718ceb5d6b44b8c2ebaaf6d30a7d540913f3589 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 2 Nov 2023 13:40:44 +0100 Subject: [PATCH 17/67] Refactor service to increase Query Pool using IN strategy --- .../categories/category-service.ts | 49 ++++- .../flows/flow-search-service.ts | 192 +++++++++--------- src/domain-services/flows/graphql/resolver.ts | 4 +- .../location/location-service.ts | 31 ++- .../organizations/organization-service.ts | 49 ++++- src/domain-services/plans/plan-service.ts | 22 +- .../usage-years/usage-year-service.ts | 39 +++- 7 files changed, 257 insertions(+), 129 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index 76f7307d..fb0f70de 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -3,14 +3,20 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; 
import { FlowCategory } from '../flows/graphql/types'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; @Service() export class CategoryService { - async getFlowCategory(flow: any, models: Database): Promise { - const flowIdBranded = createBrandedValue(flow.id); + async getCategoriesForFlows( + flowsIds: FlowId[], + models: Database + ): Promise> { const flowLinks = await models.flowLink.find({ where: { - childID: flowIdBranded, + childID: { + [Op.IN]: flowsIds, + }, }, }); @@ -23,7 +29,6 @@ export class CategoryService { objectID: { [Op.IN]: flowLinksBrandedIds, }, - versionID: flow.versionID, }, }); @@ -35,10 +40,36 @@ export class CategoryService { }, }); - return categories.map((cat) => ({ - id: cat.id, - name: cat.name, - group: cat.group, - })); + // Group categories by flow ID for easy mapping + const categoriesMap = new Map(); + + // Populate the map with categories for each flow + categoriesRef.forEach((catRef) => { + const flowId = catRef.objectID.valueOf(); + + if (!categoriesMap.has(flowId)) { + categoriesMap.set(flowId, []); + } + + const categoriesForFlow = categoriesMap.get(flowId)!; + + const category = categories.find((cat) => cat.id === catRef.categoryID); + + if (!category) { + throw new Error(`Category with ID ${catRef.categoryID} does not exist`); + } + + categoriesForFlow.push(this.mapCategoryToFlowCategory(category)); + }); + + return categoriesMap; } + + private mapCategoryToFlowCategory = ( + category: InstanceDataOfModel + ): FlowCategory => ({ + id: category.id, + name: category.name, + group: category.group, + }); } diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 956048bf..52df18e0 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,13 +1,5 @@ import { Service } from 'typedi'; -import { - FlowCategory, - FlowLocation, - FlowOrganization, - FlowPlan, - FlowSearchResult, - FlowSortField, - FlowUsageYear, -} from './graphql/types'; +import { FlowSearchResult, FlowSortField } from './graphql/types'; import { Database } from '@unocha/hpc-api-core/src/db/type'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { OrganizationService } from '../organizations/organization-service'; @@ -15,7 +7,8 @@ import { LocationService } from '../location/location-service'; import { PlanService } from '../plans/plan-service'; import { UsageYearService } from '../usage-years/usage-year-service'; import { CategoryService } from '../categories/category-service'; -import { prepareConditionFromCursor } from '../../utils/graphql/pagination'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; @Service() export class FlowSearchService { @@ -29,7 +22,7 @@ export class FlowSearchService { async search( models: Database, - first: number, + limit: number, afterCursor?: number, beforeCursor?: number, sortField?: FlowSortField, @@ -44,93 +37,107 @@ export class FlowSearchService { order: sortOrder ?? 
'desc', }; - let flows; - const countRes = await models.flow.count(); - const count = countRes[0] as { count: number }; - - const hasCursor = afterCursor || beforeCursor; - - if (hasCursor) { - const condition = prepareConditionFromCursor( - sortCondition, - afterCursor, - beforeCursor - ); + const limitComputed = limit + 1; // Fetch one more item to check for hasNextPage - flows = await models.flow.find({ - orderBy: sortCondition, - limit: first, - where: { - ...condition, + let condition; + if (afterCursor) { + condition = { + id: { + [Op.GT]: createBrandedValue(afterCursor), }, - }); - } else { - flows = await models.flow.find({ + }; + } else if (beforeCursor) { + condition = { + id: { + [Op.GT]: createBrandedValue(beforeCursor), + }, + }; + } + condition = { + ...condition, + activeStatus: true, + }; + + const [flowsIds, countRes] = await Promise.all([ + models.flow.find({ orderBy: sortCondition, - limit: first, - }); + limit: limitComputed, + where: condition, + }), + models.flow.count(), + ]); + + const hasNextPage = flowsIds.length > limit; + if (hasNextPage) { + flowsIds.pop(); // Remove the extra item used to check hasNextPage } - const items = await Promise.all( - flows.map(async (flow) => { - const categories: FlowCategory[] = - await this.categoryService.getFlowCategory(flow, models); - - const organizationsFO: any[] = []; - const locationsFO: any[] = []; - const plansFO: any[] = []; - const usageYearsFO: any[] = []; - - await this.getFlowObjects( - flow, - models, - organizationsFO, - locationsFO, - plansFO, - usageYearsFO - ); - - const organizationsPromise: Promise = - this.organizationService.getFlowObjectOrganizations( - organizationsFO, - models - ); - - const locationsPromise: Promise = - this.locationService.getFlowObjectLocations(locationsFO, models); - - const plansPromise: Promise = - this.planService.getFlowObjectPlans(plansFO, models); - - const usageYearsPromise: Promise = - this.usageYearService.getFlowObjectUsageYears(usageYearsFO, models); - - const [organizations, locations, plans, usageYears] = await Promise.all( - [ - organizationsPromise, - locationsPromise, - plansPromise, - usageYearsPromise, - ] - ); - - return { - id: flow.id.valueOf(), - amountUSD: flow.amountUSD.toString(), - createdAt: flow.createdAt, - categories: categories, - organizations: organizations, - locations: locations, - plans: plans, - usageYears: usageYears, - cursor: flow.id.valueOf(), - }; - }) + const count = countRes[0] as { count: number }; + + const flowIdsList = flowsIds.map((flow) => flow.id); + + const organizationsFO: any[] = []; + const locationsFO: any[] = []; + const plansFO: any[] = []; + const usageYearsFO: any[] = []; + + await this.getFlowObjects( + flowIdsList, + models, + organizationsFO, + locationsFO, + plansFO, + usageYearsFO ); + const [ + flows, + categoriesMap, + organizationsMap, + locationsMap, + plansMap, + usageYearsMap, + ] = await Promise.all([ + models.flow.find({ + where: { + id: { + [Op.IN]: flowIdsList, + }, + }, + }), + this.categoryService.getCategoriesForFlows(flowIdsList, models), + this.organizationService.getOrganizationsForFlows( + organizationsFO, + models + ), + this.locationService.getLocationsForFlows(locationsFO, models), + this.planService.getPlansForFlows(plansFO, models), + this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), + ]); + + const items = flows.map((flow) => { + const categories = categoriesMap.get(flow.id) || []; + const organizations = organizationsMap.get(flow.id) || []; + const locations = 
locationsMap.get(flow.id) || []; + const plans = plansMap.get(flow.id) || []; + const usageYears = usageYearsMap.get(flow.id) || []; + + return { + id: flow.id.valueOf(), + amountUSD: flow.amountUSD.toString(), + createdAt: flow.createdAt, + categories, + organizations, + locations, + plans, + usageYears, + cursor: flow.id.valueOf(), + }; + }); + return { flows: items, - hasNextPage: first <= flows.length, + hasNextPage: limit <= flows.length, hasPreviousPage: afterCursor !== undefined, startCursor: flows.length ? flows[0].id.valueOf() : 0, endCursor: flows.length ? flows[flows.length - 1].id.valueOf() : 0, @@ -142,17 +149,18 @@ export class FlowSearchService { } private async getFlowObjects( - flow: any, + flowIds: FlowId[], models: Database, organizationsFO: any[], locationsFO: any[], plansFO: any[], usageYearsFO: any[] ): Promise { - const flowIdBranded = createBrandedValue(flow.id); const flowObjects = await models.flowObject.find({ where: { - flowID: flowIdBranded, + flowID: { + [Op.IN]: flowIds, + }, }, }); diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index b78c4393..890c7d55 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -12,7 +12,7 @@ export default class FlowResolver { @Query(() => FlowSearchResult) async searchFlows( @Ctx() context: Context, - @Arg('first', { nullable: false }) first: number, + @Arg('limit', { nullable: false }) limit: number, @Arg('afterCursor', { nullable: true }) afterCursor: number, @Arg('beforeCursor', { nullable: true }) beforeCursor: number, @Arg('sortField', { nullable: true }) @@ -41,7 +41,7 @@ export default class FlowResolver { ): Promise { return await this.flowSearchService.search( context.models, - first, + limit, afterCursor, beforeCursor, sortField, diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index 678aa102..904e3609 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -29,10 +29,10 @@ export class LocationService { }); } - async getFlowObjectLocations( + async getLocationsForFlows( locationsFO: any[], models: Database - ): Promise { + ): Promise> { const locations = await models.location.find({ where: { id: { @@ -41,9 +41,28 @@ export class LocationService { }, }); - return locations.map((loc) => ({ - id: loc.id.valueOf(), - name: loc.name!, - })); + const locationsMap = new Map(); + + locationsFO.forEach((locFO) => { + const flowId = locFO.flowID; + if (!locationsMap.has(flowId)) { + locationsMap.set(flowId, []); + } + const location = locations.find((loc) => loc.id === locFO.objectID); + + if (!location) { + throw new Error(`Location with ID ${locFO.objectID} does not exist`); + } + const locationMapped = this.mapLocationsToFlowLocations(location); + locationsMap.get(flowId)!.push(locationMapped); + }); + return locationsMap; + } + + private mapLocationsToFlowLocations(location: any) { + return { + id: location.id, + name: location.name, + }; } } diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index 66805bb8..215045ff 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -4,7 +4,7 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; @Service() export class OrganizationService { - async 
getFlowObjectOrganizations(organizationsFO: any[], models: Database) { + async getOrganizationsForFlows(organizationsFO: any[], models: Database) { const organizations = await models.organization.find({ where: { id: { @@ -13,11 +13,46 @@ export class OrganizationService { }, }); - return organizations.map((org) => ({ - id: org.id, - refDirection: organizationsFO.find((orgFO) => orgFO.objectID === org.id) - .refDirection, - name: org.name, - })); + const organizationsMap = new Map(); + + organizationsFO.forEach((orgFO) => { + const flowId = orgFO.flowID; + if (!organizationsMap.has(flowId)) { + organizationsMap.set(flowId, []); + } + const organization = organizations.find( + (org) => org.id === orgFO.objectID + ); + + if (!organization) { + throw new Error( + `Organization with ID ${orgFO.objectID} does not exist` + ); + } + organizationsMap.get(flowId)!.push(organization); + }); + organizations.forEach((org) => { + const refDirection = organizationsFO.find( + (orgFO) => orgFO.objectID === org.id + ).refDirection; + + organizationsMap.set( + org.id.valueOf(), + this.mapOrganizationsToOrganizationFlows(org, refDirection) + ); + }); + + return organizationsMap; + } + + private mapOrganizationsToOrganizationFlows( + organization: any, + refDirection: any + ) { + return { + id: organization.id, + refDirection: refDirection, + name: organization.name, + }; } } diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index bea906f5..63d1c182 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -47,10 +47,10 @@ export class PlanService { return years.map((y) => y.year); } - async getFlowObjectPlans( + async getPlansForFlows( plansFO: any[], models: Database - ): Promise { + ): Promise> { const plans = await models.plan.find({ where: { id: { @@ -59,7 +59,7 @@ export class PlanService { }, }); - const flowPlans: FlowPlan[] = []; + const plansMap = new Map(); for (const plan of plans) { const planVersion = await models.planVersion.find({ @@ -69,12 +69,22 @@ export class PlanService { }, }); - flowPlans.push({ + const planMapped = { id: plan.id.valueOf(), name: planVersion[0].name, - }); + }; + + const flowId = plansFO.find( + (planFO) => planFO.objectID === plan.id + ).flowID; + + if (!plansMap.has(flowId)) { + plansMap.set(flowId, []); + } + + plansMap.get(flowId)!.push(planMapped); } - return flowPlans; + return plansMap; } } diff --git a/src/domain-services/usage-years/usage-year-service.ts b/src/domain-services/usage-years/usage-year-service.ts index 7712f235..e09197d2 100644 --- a/src/domain-services/usage-years/usage-year-service.ts +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -5,10 +5,10 @@ import { FlowUsageYear } from '../flows/graphql/types'; @Service() export class UsageYearService { - async getFlowObjectUsageYears( + async getUsageYearsForFlows( usageYearsFO: any[], models: Database - ): Promise { + ): Promise> { const usageYears = await models.usageYear.find({ where: { id: { @@ -17,11 +17,36 @@ export class UsageYearService { }, }); - return usageYears.map((usageYear) => ({ + const usageYearsMap = new Map(); + + usageYearsFO.forEach((usageYearFO) => { + const flowId = usageYearFO.flowID; + if (!usageYearsMap.has(flowId)) { + usageYearsMap.set(flowId, []); + } + const usageYear = usageYears.find( + (usageYear) => usageYear.id === usageYearFO.objectID + ); + + if (!usageYear) { + throw new Error( + `Usage year with ID ${usageYearFO.objectID} does not exist` + ); + } + const 
usageYearMapped = this.mapUsageYearsToFlowUsageYears( + usageYear, + usageYearFO.refDirection + ); + usageYearsMap.get(flowId)!.push(usageYearMapped); + }); + + return usageYearsMap; + } + + private mapUsageYearsToFlowUsageYears(usageYear: any, refDirection: any) { + return { year: usageYear.year, - direction: usageYearsFO.find( - (usageYearFO) => usageYearFO.objectID === usageYear.id - ).refDirection, - })); + direction: refDirection, + }; } } From ea9f12f245c6ad327f0116f5459fb1169f85a482 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 6 Nov 2023 08:53:47 +0100 Subject: [PATCH 18/67] Add multiple fields to response --- .../categories/category-service.ts | 52 +++--- .../external-reference-service.ts | 54 ++++++ .../flows/flow-link-service.ts | 38 ++++ .../flows/flow-search-service.ts | 104 +++++++---- src/domain-services/flows/graphql/args.ts | 15 ++ src/domain-services/flows/graphql/resolver.ts | 45 ++--- src/domain-services/flows/graphql/types.ts | 164 ++++++++++++++++-- .../report-details/report-detail-service.ts | 61 +++++++ src/utils/graphql/pagination.ts | 30 +++- 9 files changed, 463 insertions(+), 100 deletions(-) create mode 100644 src/domain-services/external-reference/external-reference-service.ts create mode 100644 src/domain-services/flows/flow-link-service.ts create mode 100644 src/domain-services/flows/graphql/args.ts create mode 100644 src/domain-services/report-details/report-detail-service.ts diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index fb0f70de..7d8464f8 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -3,26 +3,19 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { FlowCategory } from '../flows/graphql/types'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +// TODO: add proper type for flowLinks @Service() export class CategoryService { async getCategoriesForFlows( - flowsIds: FlowId[], + flowLinks: Map, models: Database ): Promise> { - const flowLinks = await models.flowLink.find({ - where: { - childID: { - [Op.IN]: flowsIds, - }, - }, - }); - - const flowLinksBrandedIds = flowLinks.map((flowLink) => - createBrandedValue(flowLink.parentID) - ); + const flowLinksBrandedIds = []; + for (const flowLink of flowLinks.keys()) { + flowLinksBrandedIds.push(createBrandedValue(flowLink)); + } const categoriesRef = await models.categoryRef.find({ where: { @@ -59,17 +52,34 @@ export class CategoryService { throw new Error(`Category with ID ${catRef.categoryID} does not exist`); } - categoriesForFlow.push(this.mapCategoryToFlowCategory(category)); + categoriesForFlow.push(this.mapCategoryToFlowCategory(category, catRef)); }); return categoriesMap; } - private mapCategoryToFlowCategory = ( - category: InstanceDataOfModel - ): FlowCategory => ({ - id: category.id, - name: category.name, - group: category.group, - }); + private mapCategoryToFlowCategory( + category: InstanceDataOfModel, + categoryRef: InstanceDataOfModel + ): FlowCategory { + return { + id: category.id, + name: category.name, + group: category.group, + createdAt: category.createdAt.toISOString(), + updatedAt: category.updatedAt.toISOString(), + description: category.description ?? '', + parentID: category.parentID ? 
category.parentID.valueOf() : 0, + code: category.code ?? '', + includeTotals: category.includeTotals ?? false, + categoryRef: { + objectID: categoryRef.objectID.valueOf(), + versionID: categoryRef.versionID, + objectType: categoryRef.objectType, + categoryID: category.id.valueOf(), + createdAt: categoryRef.createdAt.toISOString(), + updatedAt: categoryRef.updatedAt.toISOString(), + }, + }; + } } diff --git a/src/domain-services/external-reference/external-reference-service.ts b/src/domain-services/external-reference/external-reference-service.ts new file mode 100644 index 00000000..5c016473 --- /dev/null +++ b/src/domain-services/external-reference/external-reference-service.ts @@ -0,0 +1,54 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { Service } from 'typedi'; +import { FlowExternalReference } from '../flows/graphql/types'; + +@Service() +export class ExternalReferenceService { + async getExternalReferencesForFlows(flowIDs: FlowId[], models: Database) { + const externalReferences = await models.externalReference.find({ + where: { + flowID: { + [Op.IN]: flowIDs, + }, + }, + skipValidation: true, + }); + + const externalReferencesMap = new Map(); + + flowIDs.forEach((flowID) => { + externalReferencesMap.set(flowID, []); + }); + + externalReferences.forEach((externalReference) => { + const flowID = externalReference.flowID; + const externalReferenceMapped = + this.mapExternalReferenceToExternalReferenceFlows(externalReference); + + if (!externalReferencesMap.has(flowID)) { + externalReferencesMap.set(flowID, []); + } + + externalReferencesMap.get(flowID).push(externalReferenceMapped); + }); + + return externalReferencesMap; + } + + private mapExternalReferenceToExternalReferenceFlows( + externalReference: InstanceDataOfModel + ): FlowExternalReference { + return { + systemID: externalReference.systemID, + flowID: externalReference.flowID, + externalRecordID: externalReference.externalRecordID, + externalRecordDate: externalReference.externalRecordDate.toISOString(), + createdAt: externalReference.createdAt.toISOString(), + updatedAt: externalReference.updatedAt.toISOString(), + versionID: externalReference.versionID ?? 
0, + }; + } +} diff --git a/src/domain-services/flows/flow-link-service.ts b/src/domain-services/flows/flow-link-service.ts new file mode 100644 index 00000000..1b21e7fd --- /dev/null +++ b/src/domain-services/flows/flow-link-service.ts @@ -0,0 +1,38 @@ +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Database } from '@unocha/hpc-api-core/src/db/type'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Service } from 'typedi'; + +@Service() +export class FlowLinkService { + async getFlowLinksForFlows( + flowIds: FlowId[], + models: Database + ): Promise> { + const flowLinks = await models.flowLink.find({ + where: { + childID: { + [Op.IN]: flowIds, + }, + }, + }); + + // Group flowLinks by flow ID for easy mapping + const flowLinksMap = new Map(); + + // Populate the map with flowLinks for each flow + flowLinks.forEach((flowLink) => { + const flowId = flowLink.childID.valueOf(); + + if (!flowLinksMap.has(flowId)) { + flowLinksMap.set(flowId, []); + } + + const flowLinksForFlow = flowLinksMap.get(flowId)!; + + flowLinksForFlow.push(flowLink); + }); + + return flowLinksMap; + } +} diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 52df18e0..37a8d858 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -9,6 +9,9 @@ import { UsageYearService } from '../usage-years/usage-year-service'; import { CategoryService } from '../categories/category-service'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { FlowLinkService } from './flow-link-service'; +import { ExternalReferenceService } from '../external-reference/external-reference-service'; +import { ReportDetailService } from '../report-details/report-detail-service'; @Service() export class FlowSearchService { @@ -17,24 +20,28 @@ export class FlowSearchService { private readonly locationService: LocationService, private readonly planService: PlanService, private readonly usageYearService: UsageYearService, - private readonly categoryService: CategoryService + private readonly categoryService: CategoryService, + private readonly flowLinkService: FlowLinkService, + private readonly externalReferenceService: ExternalReferenceService, + private readonly reportDetailService: ReportDetailService ) {} async search( models: Database, - limit: number, + limit: number = 50, + sortOrder: 'asc' | 'desc' = 'desc', + sortField: FlowSortField = 'id', afterCursor?: number, beforeCursor?: number, - sortField?: FlowSortField, - sortOrder?: 'asc' | 'desc' + filters?: any ): Promise { if (beforeCursor && afterCursor) { throw new Error('Cannot use before and after cursor at the same time'); } const sortCondition = { - column: sortField ?? 'id', - order: sortOrder ?? 
'desc', + column: sortField, + order: sortOrder, }; const limitComputed = limit + 1; // Fetch one more item to check for hasNextPage @@ -49,63 +56,70 @@ export class FlowSearchService { } else if (beforeCursor) { condition = { id: { - [Op.GT]: createBrandedValue(beforeCursor), + [Op.LT]: createBrandedValue(beforeCursor), }, }; } - condition = { - ...condition, - activeStatus: true, - }; - const [flowsIds, countRes] = await Promise.all([ + if (filters?.activeStatus !== undefined) { + condition = { + ...condition, + activeStatus: filters.activeStatus, + }; + } + + const [flows, countRes] = await Promise.all([ models.flow.find({ orderBy: sortCondition, limit: limitComputed, where: condition, }), - models.flow.count(), + models.flow.count({ where: condition }), ]); - const hasNextPage = flowsIds.length > limit; + const hasNextPage = flows.length > limit; if (hasNextPage) { - flowsIds.pop(); // Remove the extra item used to check hasNextPage + flows.pop(); // Remove the extra item used to check hasNextPage } const count = countRes[0] as { count: number }; - const flowIdsList = flowsIds.map((flow) => flow.id); + const flowIds = flows.map((flow) => flow.id); const organizationsFO: any[] = []; const locationsFO: any[] = []; const plansFO: any[] = []; const usageYearsFO: any[] = []; - await this.getFlowObjects( - flowIdsList, - models, - organizationsFO, - locationsFO, - plansFO, - usageYearsFO + const [externalReferencesMap] = await Promise.all([ + this.externalReferenceService.getExternalReferencesForFlows( + flowIds, + models + ), + this.getFlowObjects( + flowIds, + models, + organizationsFO, + locationsFO, + plansFO, + usageYearsFO + ), + ]); + + const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( + flowIds, + models ); const [ - flows, categoriesMap, organizationsMap, locationsMap, plansMap, usageYearsMap, + reportDetailsMap, ] = await Promise.all([ - models.flow.find({ - where: { - id: { - [Op.IN]: flowIdsList, - }, - }, - }), - this.categoryService.getCategoriesForFlows(flowIdsList, models), + this.categoryService.getCategoriesForFlows(flowLinksMap, models), this.organizationService.getOrganizationsForFlows( organizationsFO, models @@ -113,6 +127,7 @@ export class FlowSearchService { this.locationService.getLocationsForFlows(locationsFO, models), this.planService.getPlansForFlows(plansFO, models), this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), + this.reportDetailService.getReportDetailsForFlows(flowIds, models), ]); const items = flows.map((flow) => { @@ -121,16 +136,39 @@ export class FlowSearchService { const locations = locationsMap.get(flow.id) || []; const plans = plansMap.get(flow.id) || []; const usageYears = usageYearsMap.get(flow.id) || []; + const externalReferences = externalReferencesMap.get(flow.id) || []; + const reportDetails = reportDetailsMap.get(flow.id) || []; + + const childIDs: number[] = (flowLinksMap.get(flow.id) || []).map( + (flowLink) => flowLink.childID.valueOf() + ) as number[]; + + const parentIDs: number[] = flowLinksMap + .get(flow.id) + ?.map((flowLink) => flowLink.parentID.valueOf()) as number[]; return { + // Mandatory fields id: flow.id.valueOf(), + versionID: flow.versionID, amountUSD: flow.amountUSD.toString(), - createdAt: flow.createdAt, + updatedAt: flow.updatedAt.toISOString(), + activeStatus: flow.activeStatus, + restricted: flow.restricted, + // Optional fields categories, organizations, locations, plans, usageYears, + childIDs, + parentIDs, + origAmount: flow.origAmount ? 
flow.origAmount.toString() : '', + origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', + externalReferences, + reportDetails, + parkedParentSource: 'placeholder', + // Paged item field cursor: flow.id.valueOf(), }; }); diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts new file mode 100644 index 00000000..d55b3137 --- /dev/null +++ b/src/domain-services/flows/graphql/args.ts @@ -0,0 +1,15 @@ +import { ArgsType, Field, InputType } from 'type-graphql'; +import { FlowSortField } from './types'; +import { PaginationArgs } from '../../../utils/graphql/pagination'; + +@InputType() +export class SearchFlowsFilters { + @Field({ nullable: true }) + activeStatus: boolean; +} + +@ArgsType() +export class SearchFlowsArgs extends PaginationArgs { + @Field(() => SearchFlowsFilters, { nullable: true }) + filters: SearchFlowsFilters; +} diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 890c7d55..342a0df9 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -1,8 +1,10 @@ -import Flow, { FlowSearchResult } from './types'; +import Flow, { FlowSearchResult, FlowSortField } from './types'; import { Service } from 'typedi'; -import { Arg, Ctx, Query, Resolver } from 'type-graphql'; +import { Arg, Args, Ctx, Query, Resolver } from 'type-graphql'; import { FlowSearchService } from '../flow-search-service'; import Context from '../../Context'; +import { SearchFlowsFilters } from './args'; +import { PaginationArgs } from '../../../utils/graphql/pagination'; @Service() @Resolver(Flow) @@ -12,40 +14,23 @@ export default class FlowResolver { @Query(() => FlowSearchResult) async searchFlows( @Ctx() context: Context, - @Arg('limit', { nullable: false }) limit: number, - @Arg('afterCursor', { nullable: true }) afterCursor: number, - @Arg('beforeCursor', { nullable: true }) beforeCursor: number, - @Arg('sortField', { nullable: true }) - sortField: - | 'id' - | 'amountUSD' - | 'versionID' - | 'activeStatus' - | 'restricted' - | 'newMoney' - | 'flowDate' - | 'decisionDate' - | 'firstReportedDate' - | 'budgetYear' - | 'origAmount' - | 'origCurrency' - | 'exchangeRate' - | 'description' - | 'notes' - | 'versionStartDate' - | 'versionEndDate' - | 'createdAt' - | 'updatedAt' - | 'deletedAt', - @Arg('sortOrder', { nullable: true }) sortOrder: 'asc' | 'desc' + @Args(() => PaginationArgs, { validate: false }) + pagination: PaginationArgs, + @Arg('activeStatus', { nullable: true }) activeStatus: boolean ): Promise { + const { limit, sortOrder, sortField, afterCursor, beforeCursor } = + pagination; + const filters: SearchFlowsFilters = { + activeStatus, + }; return await this.flowSearchService.search( context.models, limit, + sortOrder, + sortField, afterCursor, beforeCursor, - sortField, - sortOrder + filters ); } } diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 5a2c89c3..105b9c7a 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -2,8 +2,29 @@ import { Field, ObjectType } from 'type-graphql'; import { ItemPaged, PageInfo } from '../../../utils/graphql/pagination'; @ObjectType() -export class FlowCategory { +export class FlowCategoryRef { + @Field({ nullable: false }) + objectID: number; + + @Field({ nullable: false }) + versionID: number; + + @Field({ nullable: false }) + objectType: string; + + @Field({ nullable: false }) + 
categoryID: number; + + @Field({ nullable: false }) + createdAt: string; + @Field({ nullable: false }) + updatedAt: string; +} + +@ObjectType() +export class FlowCategory { + @Field({ nullable: true }) id: number; @Field({ nullable: false }) @@ -11,6 +32,27 @@ export class FlowCategory { @Field({ nullable: false }) group: string; + + @Field({ nullable: true }) + createdAt: string; + + @Field({ nullable: true }) + updatedAt: string; + + @Field({ nullable: true }) + description: string; + + @Field({ nullable: true }) + parentID: number; + + @Field({ nullable: true }) + code: string; + + @Field({ nullable: true }) + includeTotals: boolean; + + @Field(() => FlowCategoryRef, { nullable: true }) + categoryRef: FlowCategoryRef; } @ObjectType() @@ -53,33 +95,126 @@ export class FlowUsageYear { } @ObjectType() -export default class Flow implements ItemPaged { +export class FlowExternalReference { + @Field({ nullable: false }) + systemID: string; + + @Field({ nullable: false }) + flowID: number; + + @Field({ nullable: false }) + externalRecordID: string; + + @Field({ nullable: false }) + versionID: number; + + @Field({ nullable: false }) + createdAt: string; + + @Field({ nullable: false }) + updatedAt: string; + + @Field({ nullable: false }) + externalRecordDate: string; +} + +@ObjectType() +export class FlowReportDetail { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + flowID: number; + + @Field({ nullable: false }) + versionID: number; + + @Field({ nullable: false }) + contactInfo: string; + + @Field({ nullable: false }) + source: string; + + @Field({ nullable: false }) + date: string; + + @Field({ nullable: false }) + sourceID: string; + + @Field({ nullable: false }) + refCode: string; + + @Field({ nullable: false }) + verified: boolean; + + @Field({ nullable: false }) + createdAt: string; + + @Field({ nullable: false }) + updatedAt: string; + + @Field({ nullable: false }) + organizationID: number; +} + +@ObjectType() +export default class Flow extends ItemPaged { + // Mandatory fields @Field({ nullable: false }) id: number; + @Field({ nullable: false }) + versionID: number; + @Field({ nullable: false }) amountUSD: string; @Field({ nullable: false }) - createdAt: Date; + updatedAt: string; + + @Field({ nullable: false }) + activeStatus: boolean; + + @Field({ nullable: false }) + restricted: boolean; - @Field(() => [FlowCategory], { nullable: false }) + // Optional fields + @Field(() => [FlowCategory], { nullable: true }) categories: FlowCategory[]; - @Field(() => [FlowOrganization], { nullable: false }) + @Field(() => [FlowOrganization], { nullable: true }) organizations: FlowOrganization[]; - @Field(() => [FlowLocation], { nullable: false }) - locations: FlowLocation[]; - - @Field(() => [FlowPlan], { nullable: false }) + @Field(() => [FlowPlan], { nullable: true }) plans: FlowPlan[]; - @Field(() => [FlowUsageYear], { nullable: false }) + @Field(() => [FlowLocation], { nullable: true }) + locations: FlowLocation[]; + + @Field(() => [FlowUsageYear], { nullable: true }) usageYears: FlowUsageYear[]; - @Field({ nullable: false }) - cursor: number; + @Field(() => [Number], { nullable: true }) + childIDs: number[]; + + @Field(() => [Number], { nullable: true }) + parentIDs: number[]; + + @Field({ nullable: true }) + origAmount: string; + + @Field({ nullable: true }) + origCurrency: string; + + @Field(() => [FlowExternalReference], { nullable: true }) + externalReferences: FlowExternalReference[]; + + @Field(() => [FlowReportDetail], { nullable: true }) + reportDetails: 
FlowReportDetail[]; + + // Missing fields & new Types + @Field({ nullable: true }) + parkedParentSource: string; } @ObjectType() @@ -90,10 +225,12 @@ export class FlowSearchResult extends PageInfo { export type FlowSortField = | 'id' - | 'amountUSD' | 'versionID' + | 'amountUSD' + | 'updatedAt' | 'activeStatus' | 'restricted' + // | 'newMoney' | 'flowDate' | 'decisionDate' @@ -107,5 +244,4 @@ export type FlowSortField = | 'versionStartDate' | 'versionEndDate' | 'createdAt' - | 'updatedAt' | 'deletedAt'; diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts new file mode 100644 index 00000000..ce347b2b --- /dev/null +++ b/src/domain-services/report-details/report-detail-service.ts @@ -0,0 +1,61 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { Service } from 'typedi'; +import { FlowReportDetail } from '../flows/graphql/types'; +@Service() +export class ReportDetailService { + async getReportDetailsForFlows( + flowIds: FlowId[], + models: Database + ): Promise> { + const reportDetails: InstanceDataOfModel[] = + await models.reportDetail.find({ + where: { + flowID: { + [Op.IN]: flowIds, + }, + }, + skipValidation: true, + }); + + const reportDetailsMap = new Map(); + + flowIds.forEach((flowId: FlowId) => { + if (!reportDetailsMap.has(flowId)) { + reportDetailsMap.set(flowId, []); + } + const reportDetail = reportDetails.find( + (report) => report && flowId === report?.flowID + ); + + if (reportDetail) { + const reportDetailMapped = + this.mapReportDetailsToFlowReportDetail(reportDetail); + reportDetailsMap.get(flowId)?.push(reportDetailMapped); + } + }); + + return reportDetailsMap; + } + + private mapReportDetailsToFlowReportDetail( + reportDetail: any + ): FlowReportDetail { + return { + id: reportDetail.id, + flowID: reportDetail.flowId, + versionID: reportDetail.versionID, + contactInfo: reportDetail.contactInfo, + source: reportDetail.source, + date: reportDetail.date.toISOString(), + sourceID: reportDetail.sourceID, + refCode: reportDetail.refCode, + verified: reportDetail.verified, + createdAt: reportDetail.createdAt.toISOString(), + updatedAt: reportDetail.updatedAt.toISOString(), + organizationID: reportDetail.organizationId, + }; + } +} diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts index bdc8008c..650ddaba 100644 --- a/src/utils/graphql/pagination.ts +++ b/src/utils/graphql/pagination.ts @@ -1,8 +1,16 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; -import { ObjectType, Field } from 'type-graphql'; +import { ObjectType, Field, ArgsType } from 'type-graphql'; -export interface ItemPaged { +export type SortOrder = 'asc' | 'desc'; + +export interface IItemPaged { + cursor: number; +} + +@ObjectType() +export class ItemPaged implements IItemPaged { + @Field({ nullable: false }) cursor: number; } @@ -62,3 +70,21 @@ export function prepareConditionFromCursor( return {}; } + +@ArgsType() +export class PaginationArgs { + @Field({ nullable: false }) + limit: number; + + @Field({ nullable: true }) + afterCursor: number; + + @Field({ nullable: true }) + beforeCursor: number; + + @Field(() => String, { nullable: true }) + sortField: TSortFields; + + 
@Field(() => String, { nullable: true, defaultValue: 'desc' }) + sortOrder: SortOrder; +} From 57ff5348da9933e896a0a2acd2d00bb0b02e6f62 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 6 Nov 2023 09:14:21 +0100 Subject: [PATCH 19/67] Add GraphQL types. --- .../categories/category-service.ts | 46 +++-- .../categories/graphql/types.ts | 44 ++++ .../flows/flow-search-service.ts | 68 ++++++- src/domain-services/flows/graphql/resolver.ts | 4 +- src/domain-services/flows/graphql/types.ts | 192 ++++-------------- src/domain-services/location/graphql/types.ts | 14 +- .../location/location-service.ts | 40 ++-- .../organizations/graphql/types.ts | 14 ++ .../organizations/organization-service.ts | 29 +-- src/domain-services/plans/graphql/types.ts | 13 ++ src/domain-services/plans/plan-service.ts | 64 ++++-- .../report-details/graphql/types.ts | 35 ++++ .../report-details/report-detail-service.ts | 16 +- .../usage-years/grpahql/types.ts | 11 + .../usage-years/usage-year-service.ts | 24 ++- .../graphql}/base-types.ts | 8 +- src/utils/graphql/pagination.ts | 6 - 17 files changed, 367 insertions(+), 261 deletions(-) create mode 100644 src/domain-services/categories/graphql/types.ts create mode 100644 src/domain-services/organizations/graphql/types.ts create mode 100644 src/domain-services/report-details/graphql/types.ts create mode 100644 src/domain-services/usage-years/grpahql/types.ts rename src/{domain-services => utils/graphql}/base-types.ts (61%) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index 7d8464f8..ec19c9f9 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -1,40 +1,46 @@ import { Database } from '@unocha/hpc-api-core/src/db'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { FlowCategory } from '../flows/graphql/types'; +import { Category } from './graphql/types'; import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; // TODO: add proper type for flowLinks @Service() export class CategoryService { async getCategoriesForFlows( - flowLinks: Map, + flowLinks: Map[]>, models: Database - ): Promise> { + ): Promise> { const flowLinksBrandedIds = []; for (const flowLink of flowLinks.keys()) { flowLinksBrandedIds.push(createBrandedValue(flowLink)); } - const categoriesRef = await models.categoryRef.find({ - where: { - objectID: { - [Op.IN]: flowLinksBrandedIds, - }, - }, - }); + // Group categories by flow ID for easy mapping + const categoriesMap = new Map(); - const categories = await models.category.find({ - where: { - id: { - [Op.IN]: categoriesRef.map((catRef) => catRef.categoryID), + if (flowLinksBrandedIds.length === 0) { + return categoriesMap; + } + + const categoriesRef: InstanceDataOfModel[] = + await models.categoryRef.find({ + where: { + objectID: { + [Op.IN]: flowLinksBrandedIds, + }, }, - }, - }); + }); - // Group categories by flow ID for easy mapping - const categoriesMap = new Map(); + const categories: InstanceDataOfModel[] = + await models.category.find({ + where: { + id: { + [Op.IN]: categoriesRef.map((catRef) => catRef.categoryID), + }, + }, + }); // Populate the map with categories for each flow categoriesRef.forEach((catRef) => { @@ -61,7 +67,7 @@ export class CategoryService { private 
mapCategoryToFlowCategory( category: InstanceDataOfModel, categoryRef: InstanceDataOfModel - ): FlowCategory { + ): Category { return { id: category.id, name: category.name, diff --git a/src/domain-services/categories/graphql/types.ts b/src/domain-services/categories/graphql/types.ts new file mode 100644 index 00000000..5cf55816 --- /dev/null +++ b/src/domain-services/categories/graphql/types.ts @@ -0,0 +1,44 @@ +import { Field, ObjectType } from 'type-graphql'; +import { BaseType } from '../../../utils/graphql/base-types'; + +@ObjectType() +export class CategoryRef extends BaseType { + @Field({ nullable: false }) + objectID: number; + + @Field({ nullable: false }) + versionID: number; + + @Field({ nullable: false }) + objectType: string; + + @Field({ nullable: false }) + categoryID: number; +} + +@ObjectType() +export class Category extends BaseType { + @Field({ nullable: true }) + id: number; + + @Field({ nullable: false }) + name: string; + + @Field({ nullable: false }) + group: string; + + @Field({ nullable: true }) + description: string; + + @Field({ nullable: true }) + parentID: number; + + @Field({ nullable: true }) + code: string; + + @Field({ nullable: true }) + includeTotals: boolean; + + @Field(() => CategoryRef, { nullable: true }) + categoryRef: CategoryRef; +} diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 37a8d858..e548fbd3 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,5 +1,9 @@ import { Service } from 'typedi'; -import { FlowSearchResult, FlowSortField } from './graphql/types'; +import { + FlowParkedParentSource, + FlowSearchResult, + FlowSortField, +} from './graphql/types'; import { Database } from '@unocha/hpc-api-core/src/db/type'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { OrganizationService } from '../organizations/organization-service'; @@ -84,7 +88,7 @@ export class FlowSearchService { const count = countRes[0] as { count: number }; - const flowIds = flows.map((flow) => flow.id); + const flowIds: FlowId[] = flows.map((flow) => flow.id); const organizationsFO: any[] = []; const locationsFO: any[] = []; @@ -131,17 +135,24 @@ export class FlowSearchService { ]); const items = flows.map((flow) => { + const flowLink = flowLinksMap.get(flow.id) || []; const categories = categoriesMap.get(flow.id) || []; const organizations = organizationsMap.get(flow.id) || []; - const locations = locationsMap.get(flow.id) || []; + const locations = [...locationsMap.get(flow.id) || []] ; const plans = plansMap.get(flow.id) || []; const usageYears = usageYearsMap.get(flow.id) || []; const externalReferences = externalReferencesMap.get(flow.id) || []; const reportDetails = reportDetailsMap.get(flow.id) || []; - const childIDs: number[] = (flowLinksMap.get(flow.id) || []).map( - (flowLink) => flowLink.childID.valueOf() - ) as number[]; + const parkedParentSource: FlowParkedParentSource[] = []; + if (flow.activeStatus && flowLink.length > 0) { + this.getParketParents(flow, flowLink, models, parkedParentSource); + } + + // TODO: change and use flow.depth to verify (depth > 0) + const childIDs: number[] = flowLinksMap + .get(flow.id) + ?.map((flowLink) => flowLink.childID.valueOf()) as number[]; const parentIDs: number[] = flowLinksMap .get(flow.id) @@ -152,6 +163,7 @@ export class FlowSearchService { id: flow.id.valueOf(), versionID: flow.versionID, amountUSD: flow.amountUSD.toString(), + createdAt: 
flow.createdAt.toISOString(), updatedAt: flow.updatedAt.toISOString(), activeStatus: flow.activeStatus, restricted: flow.restricted, @@ -167,7 +179,7 @@ export class FlowSearchService { origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', externalReferences, reportDetails, - parkedParentSource: 'placeholder', + parkedParentSource, // Paged item field cursor: flow.id.valueOf(), }; @@ -214,4 +226,46 @@ export class FlowSearchService { } }); } + + private async getParketParents( + flow: any, + flowLink: any[], + models: Database, + parkedParentSource: FlowParkedParentSource[] + ): Promise { + const flowLinksDepth0 = flowLink.filter((flowLink) => flowLink.depth === 0); + + const flowLinksParent = flowLinksDepth0.filter( + (flowLink) => flowLink.parentID === flow.id + ); + + const parentFlowIds = flowLinksParent.map((flowLink) => + flowLink.parentID.valueOf() + ); + + const categories = await models.category.find({ + where: { + group: 'flowType', + name: 'parked', + }, + }); + + const categoriesIDs = categories.map((category) => category.id); + + const categoryRef = await models.categoryRef.find({ + where: { + categoryID: { + [Op.IN]: categoriesIDs, + }, + versionID: flow.versionID, + }, + }); + + const parentFlows = flowLinksParent.filter((flowLink) => { + return categoryRef.some( + (categoryRef) => + categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() + ); + }); + } } diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 342a0df9..394a8915 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -1,4 +1,4 @@ -import Flow, { FlowSearchResult, FlowSortField } from './types'; +import { FlowPaged, FlowSearchResult, FlowSortField } from './types'; import { Service } from 'typedi'; import { Arg, Args, Ctx, Query, Resolver } from 'type-graphql'; import { FlowSearchService } from '../flow-search-service'; @@ -7,7 +7,7 @@ import { SearchFlowsFilters } from './args'; import { PaginationArgs } from '../../../utils/graphql/pagination'; @Service() -@Resolver(Flow) +@Resolver(FlowPaged) export default class FlowResolver { constructor(private flowSearchService: FlowSearchService) {} diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 105b9c7a..b36c0113 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -1,98 +1,12 @@ import { Field, ObjectType } from 'type-graphql'; -import { ItemPaged, PageInfo } from '../../../utils/graphql/pagination'; - -@ObjectType() -export class FlowCategoryRef { - @Field({ nullable: false }) - objectID: number; - - @Field({ nullable: false }) - versionID: number; - - @Field({ nullable: false }) - objectType: string; - - @Field({ nullable: false }) - categoryID: number; - - @Field({ nullable: false }) - createdAt: string; - - @Field({ nullable: false }) - updatedAt: string; -} - -@ObjectType() -export class FlowCategory { - @Field({ nullable: true }) - id: number; - - @Field({ nullable: false }) - name: string; - - @Field({ nullable: false }) - group: string; - - @Field({ nullable: true }) - createdAt: string; - - @Field({ nullable: true }) - updatedAt: string; - - @Field({ nullable: true }) - description: string; - - @Field({ nullable: true }) - parentID: number; - - @Field({ nullable: true }) - code: string; - - @Field({ nullable: true }) - includeTotals: boolean; - - @Field(() => FlowCategoryRef, { nullable: true }) - categoryRef: 
FlowCategoryRef; -} - -@ObjectType() -export class FlowOrganization { - @Field({ nullable: false }) - id: number; - - @Field({ nullable: false }) - refDirection: string; - - @Field({ nullable: false }) - name: string; -} - -@ObjectType() -export class FlowLocation { - @Field({ nullable: false }) - id: number; - - @Field({ nullable: false }) - name: string; -} - -@ObjectType() -export class FlowPlan { - @Field({ nullable: false }) - id: number; - - @Field({ nullable: false }) - name: string; -} - -@ObjectType() -export class FlowUsageYear { - @Field({ nullable: false }) - year: string; - - @Field({ nullable: false }) - direction: string; -} +import { IItemPaged, PageInfo } from '../../../utils/graphql/pagination'; +import { Category } from '../../categories/graphql/types'; +import { BaseLocation } from '../../location/graphql/types'; +import { Organization } from '../../organizations/graphql/types'; +import { BasePlan } from '../../plans/graphql/types'; +import { ReportDetail } from '../../report-details/graphql/types'; +import { UsageYear } from '../../usage-years/grpahql/types'; +import { BaseType } from '../../../utils/graphql/base-types'; @ObjectType() export class FlowExternalReference { @@ -119,47 +33,16 @@ export class FlowExternalReference { } @ObjectType() -export class FlowReportDetail { - @Field({ nullable: false }) - id: number; - - @Field({ nullable: false }) - flowID: number; +export class FlowParkedParentSource { + @Field(() => [Number], { nullable: false }) + organization: number[]; - @Field({ nullable: false }) - versionID: number; - - @Field({ nullable: false }) - contactInfo: string; - - @Field({ nullable: false }) - source: string; - - @Field({ nullable: false }) - date: string; - - @Field({ nullable: false }) - sourceID: string; - - @Field({ nullable: false }) - refCode: string; - - @Field({ nullable: false }) - verified: boolean; - - @Field({ nullable: false }) - createdAt: string; - - @Field({ nullable: false }) - updatedAt: string; - - @Field({ nullable: false }) - organizationID: number; + @Field(() => [String], { nullable: false }) + orgName: string[]; } @ObjectType() -export default class Flow extends ItemPaged { - // Mandatory fields +export class BaseFlow extends BaseType { @Field({ nullable: false }) id: number; @@ -169,30 +52,29 @@ export default class Flow extends ItemPaged { @Field({ nullable: false }) amountUSD: string; - @Field({ nullable: false }) - updatedAt: string; - @Field({ nullable: false }) activeStatus: boolean; @Field({ nullable: false }) restricted: boolean; +} - // Optional fields - @Field(() => [FlowCategory], { nullable: true }) - categories: FlowCategory[]; +@ObjectType() +export class Flow extends BaseFlow { + @Field(() => [Category], { nullable: true }) + categories: Category[]; - @Field(() => [FlowOrganization], { nullable: true }) - organizations: FlowOrganization[]; + @Field(() => [Organization], { nullable: true }) + organizations: Organization[]; - @Field(() => [FlowPlan], { nullable: true }) - plans: FlowPlan[]; + @Field(() => [BasePlan], { nullable: true }) + plans: BasePlan[]; - @Field(() => [FlowLocation], { nullable: true }) - locations: FlowLocation[]; + @Field(() => [BaseLocation], { nullable: true }) + locations: BaseLocation[]; - @Field(() => [FlowUsageYear], { nullable: true }) - usageYears: FlowUsageYear[]; + @Field(() => [UsageYear], { nullable: true }) + usageYears: UsageYear[]; @Field(() => [Number], { nullable: true }) childIDs: number[]; @@ -209,18 +91,23 @@ export default class Flow extends ItemPaged { @Field(() => 
[FlowExternalReference], { nullable: true }) externalReferences: FlowExternalReference[]; - @Field(() => [FlowReportDetail], { nullable: true }) - reportDetails: FlowReportDetail[]; + @Field(() => [ReportDetail], { nullable: true }) + reportDetails: ReportDetail[]; - // Missing fields & new Types - @Field({ nullable: true }) - parkedParentSource: string; + @Field(() => [FlowParkedParentSource], { nullable: true }) + parkedParentSource: FlowParkedParentSource[]; +} + +@ObjectType() +export class FlowPaged extends Flow implements IItemPaged { + @Field({ nullable: false }) + cursor: number; } @ObjectType() export class FlowSearchResult extends PageInfo { - @Field(() => [Flow], { nullable: false }) - flows: Flow[]; + @Field(() => [FlowPaged], { nullable: false }) + flows: FlowPaged[]; } export type FlowSortField = @@ -230,7 +117,6 @@ export type FlowSortField = | 'updatedAt' | 'activeStatus' | 'restricted' - // | 'newMoney' | 'flowDate' | 'decisionDate' diff --git a/src/domain-services/location/graphql/types.ts b/src/domain-services/location/graphql/types.ts index c00b648a..76f10cf3 100644 --- a/src/domain-services/location/graphql/types.ts +++ b/src/domain-services/location/graphql/types.ts @@ -1,7 +1,7 @@ +import { BaseType } from '../../../utils/graphql/base-types'; import { Brand } from '@unocha/hpc-api-core/src/util/types'; import { MaxLength } from 'class-validator'; import { Field, ID, Int, ObjectType, registerEnumType } from 'type-graphql'; -import { BaseType } from '../../base-types'; export enum LocationStatus { active = 'active', @@ -53,3 +53,15 @@ export default class Location extends BaseType { @Field({ defaultValue: true }) itosSync: boolean; // Accidentally optional } + +@ObjectType() +export class BaseLocation extends BaseType { + @Field({ nullable: false }) + id: number; + + @Field(() => String, { nullable: false }) + name: string | null; + + @Field({ nullable: false }) + direction: string; +} diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index 904e3609..5221529c 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -2,8 +2,9 @@ import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { FlowLocation } from '../flows/graphql/types'; +import { BaseLocation } from './graphql/types'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { LocationId } from '@unocha/hpc-api-core/src/db/models/location'; @Service() export class LocationService { @@ -30,39 +31,50 @@ export class LocationService { } async getLocationsForFlows( - locationsFO: any[], + locationsFO: InstanceDataOfModel[], models: Database - ): Promise> { - const locations = await models.location.find({ - where: { - id: { - [Op.IN]: locationsFO.map((locFO) => locFO.objectID), + ): Promise>> { + const locationObjectsIDs: LocationId[] = locationsFO.map((locFO) => + createBrandedValue(locFO.objectID) + ); + + const locations: InstanceDataOfModel[] = + await models.location.find({ + where: { + id: { + [Op.IN]: locationObjectsIDs, + }, }, - }, - }); + }); - const locationsMap = new Map(); + const locationsMap = new Map>(); locationsFO.forEach((locFO) => { const flowId = locFO.flowID; if (!locationsMap.has(flowId)) { - locationsMap.set(flowId, []); + 
locationsMap.set(flowId, new Set()); } const location = locations.find((loc) => loc.id === locFO.objectID); if (!location) { throw new Error(`Location with ID ${locFO.objectID} does not exist`); } - const locationMapped = this.mapLocationsToFlowLocations(location); - locationsMap.get(flowId)!.push(locationMapped); + const locationMapped = this.mapLocationsToFlowLocations(location, locFO); + locationsMap.get(flowId)!.add(locationMapped); }); return locationsMap; } - private mapLocationsToFlowLocations(location: any) { + private mapLocationsToFlowLocations( + location: InstanceDataOfModel, + locationFO: InstanceDataOfModel + ) { return { id: location.id, name: location.name, + direction: locationFO.refDirection, + createdAt: location.createdAt.toISOString(), + updatedAt: location.updatedAt.toISOString(), }; } } diff --git a/src/domain-services/organizations/graphql/types.ts b/src/domain-services/organizations/graphql/types.ts new file mode 100644 index 00000000..e4a52057 --- /dev/null +++ b/src/domain-services/organizations/graphql/types.ts @@ -0,0 +1,14 @@ +import { Field, ObjectType } from 'type-graphql'; +import { BaseType } from '../../../utils/graphql/base-types'; + +@ObjectType() +export class Organization extends BaseType { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: true }) + direction: string; + + @Field({ nullable: true }) + name: string; +} diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index 215045ff..1bac9e1d 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -1,6 +1,7 @@ import { Database } from '@unocha/hpc-api-core/src/db'; import { Service } from 'typedi'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Organization } from './graphql/types'; @Service() export class OrganizationService { @@ -13,10 +14,11 @@ export class OrganizationService { }, }); - const organizationsMap = new Map(); + const organizationsMap = new Map(); organizationsFO.forEach((orgFO) => { const flowId = orgFO.flowID; + if (!organizationsMap.has(flowId)) { organizationsMap.set(flowId, []); } @@ -29,17 +31,14 @@ export class OrganizationService { `Organization with ID ${orgFO.objectID} does not exist` ); } - organizationsMap.get(flowId)!.push(organization); - }); - organizations.forEach((org) => { - const refDirection = organizationsFO.find( - (orgFO) => orgFO.objectID === org.id - ).refDirection; - - organizationsMap.set( - org.id.valueOf(), - this.mapOrganizationsToOrganizationFlows(org, refDirection) - ); + + const organizationMapped: Organization = + this.mapOrganizationsToOrganizationFlows( + organization, + orgFO.refDirection + ); + + organizationsMap.get(flowId)!.push(organizationMapped); }); return organizationsMap; @@ -48,11 +47,13 @@ export class OrganizationService { private mapOrganizationsToOrganizationFlows( organization: any, refDirection: any - ) { + ): Organization { return { id: organization.id, - refDirection: refDirection, + direction: refDirection, name: organization.name, + createdAt: organization.createdAt.toISOString(), + updatedAt: organization.updatedAt.toISOString(), }; } } diff --git a/src/domain-services/plans/graphql/types.ts b/src/domain-services/plans/graphql/types.ts index c947291d..3316b717 100644 --- a/src/domain-services/plans/graphql/types.ts +++ b/src/domain-services/plans/graphql/types.ts @@ -2,6 +2,7 @@ import { Brand } from 
'@unocha/hpc-api-core/src/util/types'; import { MaxLength } from 'class-validator'; import { Field, ID, Int, ObjectType } from 'type-graphql'; import PlanTag from '../../plan-tag/graphql/types'; +import { BaseType } from '../../../utils/graphql/base-types'; @ObjectType() export class PlanCaseload { @@ -95,3 +96,15 @@ export default class Plan { @Field(() => [PlanTag]) tags: PlanTag[]; } + +@ObjectType() +export class BasePlan extends BaseType { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + name: string; + + @Field({ nullable: false }) + direction: string; +} diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index 63d1c182..09509563 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -4,7 +4,8 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { NotFoundError } from '@unocha/hpc-api-core/src/util/error'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { FlowPlan } from '../flows/graphql/types'; +import { BasePlan } from './graphql/types'; +import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; @Service() export class PlanService { @@ -48,18 +49,22 @@ export class PlanService { } async getPlansForFlows( - plansFO: any[], + plansFO: InstanceDataOfModel[], models: Database - ): Promise> { - const plans = await models.plan.find({ - where: { - id: { - [Op.IN]: plansFO.map((planFO) => planFO.objectID), + ): Promise> { + const planObjectsIDs: PlanId[] = plansFO.map((planFO) => + createBrandedValue(planFO.objectID) + ); + const plans: InstanceDataOfModel[] = + await models.plan.find({ + where: { + id: { + [Op.IN]: planObjectsIDs, + }, }, - }, - }); + }); - const plansMap = new Map(); + const plansMap = new Map(); for (const plan of plans) { const planVersion = await models.planVersion.find({ @@ -69,22 +74,37 @@ export class PlanService { }, }); - const planMapped = { - id: plan.id.valueOf(), - name: planVersion[0].name, - }; - - const flowId = plansFO.find( + + const planFlowOobject = plansFO.find( (planFO) => planFO.objectID === plan.id - ).flowID; - - if (!plansMap.has(flowId)) { - plansMap.set(flowId, []); + ); + + const flowId = planFlowOobject && planFlowOobject.flowID; + + const planMapped = this.mapPlansToFlowPlans(plan, planVersion[0], planFlowOobject?.refDirection || null); + + if (flowId) { + if (!plansMap.has(flowId)) { + plansMap.set(flowId, []); + } + + plansMap.get(flowId)!.push(planMapped); } - - plansMap.get(flowId)!.push(planMapped); } return plansMap; } + + private mapPlansToFlowPlans( + plan: InstanceDataOfModel, + planVersion: InstanceDataOfModel, + direction: string | null): BasePlan { + return { + id: plan.id.valueOf(), + name: planVersion.name, + createdAt: plan.createdAt.toISOString(), + updatedAt: plan.updatedAt.toISOString(), + direction: direction ?? 
'', + }; + } } diff --git a/src/domain-services/report-details/graphql/types.ts b/src/domain-services/report-details/graphql/types.ts new file mode 100644 index 00000000..3d4ddfa9 --- /dev/null +++ b/src/domain-services/report-details/graphql/types.ts @@ -0,0 +1,35 @@ +import { Field, ObjectType } from 'type-graphql'; +import { BaseType } from '../../../utils/graphql/base-types'; + +@ObjectType() +export class ReportDetail extends BaseType { + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + flowID: number; + + @Field({ nullable: false }) + versionID: number; + + @Field(() => String, { nullable: true }) + contactInfo: string | null; + + @Field({ nullable: false }) + source: string; + + @Field(() => String, { nullable: true }) + date: string | null; + + @Field(() => String, { nullable: true }) + sourceID: string | null; + + @Field(() => String, { nullable: true }) + refCode: string | null; + + @Field({ nullable: false }) + verified: boolean; + + @Field(() => Number, { nullable: true }) + organizationID: number | null; +} diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index ce347b2b..70b8b015 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -3,13 +3,13 @@ import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Service } from 'typedi'; -import { FlowReportDetail } from '../flows/graphql/types'; +import { ReportDetail } from './graphql/types'; @Service() export class ReportDetailService { async getReportDetailsForFlows( flowIds: FlowId[], models: Database - ): Promise> { + ): Promise> { const reportDetails: InstanceDataOfModel[] = await models.reportDetail.find({ where: { @@ -20,7 +20,7 @@ export class ReportDetailService { skipValidation: true, }); - const reportDetailsMap = new Map(); + const reportDetailsMap = new Map(); flowIds.forEach((flowId: FlowId) => { if (!reportDetailsMap.has(flowId)) { @@ -41,21 +41,21 @@ export class ReportDetailService { } private mapReportDetailsToFlowReportDetail( - reportDetail: any - ): FlowReportDetail { + reportDetail: InstanceDataOfModel + ): ReportDetail { return { id: reportDetail.id, - flowID: reportDetail.flowId, + flowID: reportDetail.flowID, versionID: reportDetail.versionID, contactInfo: reportDetail.contactInfo, source: reportDetail.source, - date: reportDetail.date.toISOString(), + date: reportDetail.date, sourceID: reportDetail.sourceID, refCode: reportDetail.refCode, verified: reportDetail.verified, createdAt: reportDetail.createdAt.toISOString(), updatedAt: reportDetail.updatedAt.toISOString(), - organizationID: reportDetail.organizationId, + organizationID: reportDetail.organizationID, }; } } diff --git a/src/domain-services/usage-years/grpahql/types.ts b/src/domain-services/usage-years/grpahql/types.ts new file mode 100644 index 00000000..470e06b3 --- /dev/null +++ b/src/domain-services/usage-years/grpahql/types.ts @@ -0,0 +1,11 @@ +import { Field, ObjectType } from 'type-graphql'; +import { BaseType } from '../../../utils/graphql/base-types'; + +@ObjectType() +export class UsageYear extends BaseType { + @Field({ nullable: false }) + year: string; + + @Field({ nullable: false }) + direction: string; +} diff --git a/src/domain-services/usage-years/usage-year-service.ts 
b/src/domain-services/usage-years/usage-year-service.ts index e09197d2..bb74c9a6 100644 --- a/src/domain-services/usage-years/usage-year-service.ts +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -1,23 +1,25 @@ import { Database } from '@unocha/hpc-api-core/src/db'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; -import { FlowUsageYear } from '../flows/graphql/types'; +import { UsageYear } from './grpahql/types'; +import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; @Service() export class UsageYearService { async getUsageYearsForFlows( usageYearsFO: any[], models: Database - ): Promise> { - const usageYears = await models.usageYear.find({ - where: { - id: { - [Op.IN]: usageYearsFO.map((usageYearFO) => usageYearFO.objectID), + ): Promise> { + const usageYears: InstanceDataOfModel[] = + await models.usageYear.find({ + where: { + id: { + [Op.IN]: usageYearsFO.map((usageYearFO) => usageYearFO.objectID), + }, }, - }, - }); + }); - const usageYearsMap = new Map(); + const usageYearsMap = new Map(); usageYearsFO.forEach((usageYearFO) => { const flowId = usageYearFO.flowID; @@ -25,7 +27,7 @@ export class UsageYearService { usageYearsMap.set(flowId, []); } const usageYear = usageYears.find( - (usageYear) => usageYear.id === usageYearFO.objectID + (uYear) => uYear.id === usageYearFO.objectID ); if (!usageYear) { @@ -47,6 +49,8 @@ export class UsageYearService { return { year: usageYear.year, direction: refDirection, + createdAt: usageYear.createdAt, + updatedAt: usageYear.updatedAt, }; } } diff --git a/src/domain-services/base-types.ts b/src/utils/graphql/base-types.ts similarity index 61% rename from src/domain-services/base-types.ts rename to src/utils/graphql/base-types.ts index 24db78d8..749bc046 100644 --- a/src/domain-services/base-types.ts +++ b/src/utils/graphql/base-types.ts @@ -2,11 +2,11 @@ import { Field, ObjectType } from 'type-graphql'; @ObjectType() export class BaseType { - @Field() - createdAt: Date; + @Field(() => String, { nullable: false }) + createdAt: string; - @Field() - updatedAt: Date; + @Field(() => String, { nullable: false }) + updatedAt: string; } @ObjectType() diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts index 650ddaba..c4765253 100644 --- a/src/utils/graphql/pagination.ts +++ b/src/utils/graphql/pagination.ts @@ -8,12 +8,6 @@ export interface IItemPaged { cursor: number; } -@ObjectType() -export class ItemPaged implements IItemPaged { - @Field({ nullable: false }) - cursor: number; -} - @ObjectType() export class PageInfo { @Field({ nullable: false }) From 00230c0ee12e42bd477c7a6ac5e7588f804dc38d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 10 Nov 2023 11:51:05 +0100 Subject: [PATCH 20/67] Apply Strategy Pattern to FlowSearch service. Simplify resolver args. 
--- .../flows/flow-link-service.ts | 5 +- .../flows/flow-search-service.ts | 316 ++++++------------ src/domain-services/flows/flow-service.ts | 24 ++ src/domain-services/flows/graphql/args.ts | 63 +++- src/domain-services/flows/graphql/resolver.ts | 23 +- .../flows/strategy/flow-search-strategy.ts | 6 + .../impl/only-flow-conditions-strategy.ts | 252 ++++++++++++++ .../organizations/graphql/types.ts | 3 + .../organizations/organization-service.ts | 1 + 9 files changed, 450 insertions(+), 243 deletions(-) create mode 100644 src/domain-services/flows/flow-service.ts create mode 100644 src/domain-services/flows/strategy/flow-search-strategy.ts create mode 100644 src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts diff --git a/src/domain-services/flows/flow-link-service.ts b/src/domain-services/flows/flow-link-service.ts index 1b21e7fd..eac0e356 100644 --- a/src/domain-services/flows/flow-link-service.ts +++ b/src/domain-services/flows/flow-link-service.ts @@ -1,6 +1,7 @@ import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Database } from '@unocha/hpc-api-core/src/db/type'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; import { Service } from 'typedi'; @Service() @@ -8,7 +9,7 @@ export class FlowLinkService { async getFlowLinksForFlows( flowIds: FlowId[], models: Database - ): Promise> { + ): Promise[]>> { const flowLinks = await models.flowLink.find({ where: { childID: { @@ -18,7 +19,7 @@ export class FlowLinkService { }); // Group flowLinks by flow ID for easy mapping - const flowLinksMap = new Map(); + const flowLinksMap = new Map[]>(); // Populate the map with flowLinks for each flow flowLinks.forEach((flowLink) => { diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index e548fbd3..e38ca553 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,271 +1,145 @@ +import { Database } from '@unocha/hpc-api-core/src/db/type'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { - FlowParkedParentSource, - FlowSearchResult, - FlowSortField, + FlowObjectFilters, + SearchFlowsArgs, + SearchFlowsFilters, +} from './graphql/args'; +import { + FlowSearchResult } from './graphql/types'; -import { Database } from '@unocha/hpc-api-core/src/db/type'; -import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; -import { OrganizationService } from '../organizations/organization-service'; -import { LocationService } from '../location/location-service'; -import { PlanService } from '../plans/plan-service'; -import { UsageYearService } from '../usage-years/usage-year-service'; -import { CategoryService } from '../categories/category-service'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { FlowLinkService } from './flow-link-service'; -import { ExternalReferenceService } from '../external-reference/external-reference-service'; -import { ReportDetailService } from '../report-details/report-detail-service'; +import { FlowSearchStrategy } from './strategy/flow-search-strategy'; +import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy'; @Service() export class FlowSearchService { 
constructor( - private readonly organizationService: OrganizationService, - private readonly locationService: LocationService, - private readonly planService: PlanService, - private readonly usageYearService: UsageYearService, - private readonly categoryService: CategoryService, - private readonly flowLinkService: FlowLinkService, - private readonly externalReferenceService: ExternalReferenceService, - private readonly reportDetailService: ReportDetailService - ) {} + private readonly onlyFlowFiltersStrategy: OnlyFlowFiltersStrategy + ) { } async search( models: Database, - limit: number = 50, - sortOrder: 'asc' | 'desc' = 'desc', - sortField: FlowSortField = 'id', - afterCursor?: number, - beforeCursor?: number, - filters?: any + filters: SearchFlowsArgs ): Promise { + const { limit, afterCursor, beforeCursor, sortField, sortOrder } = filters; + if (beforeCursor && afterCursor) { throw new Error('Cannot use before and after cursor at the same time'); } - const sortCondition = { - column: sortField, - order: sortOrder, - }; - - const limitComputed = limit + 1; // Fetch one more item to check for hasNextPage - - let condition; + let cursorCondition; if (afterCursor) { - condition = { + cursorCondition = { id: { [Op.GT]: createBrandedValue(afterCursor), }, }; } else if (beforeCursor) { - condition = { + cursorCondition = { id: { [Op.LT]: createBrandedValue(beforeCursor), }, }; } - if (filters?.activeStatus !== undefined) { - condition = { - ...condition, - activeStatus: filters.activeStatus, - }; - } - - const [flows, countRes] = await Promise.all([ - models.flow.find({ - orderBy: sortCondition, - limit: limitComputed, - where: condition, - }), - models.flow.count({ where: condition }), - ]); + const orderBy = { + column: sortField??'updatedAt', + order: sortOrder ?? 
'desc', + }; - const hasNextPage = flows.length > limit; - if (hasNextPage) { - flows.pop(); // Remove the extra item used to check hasNextPage + const { flowFilters, flowObjectFilters } = filters; + + let onlyFlowFilters = false; + let onlyFlowObjectFilters = false; + let bothFlowFilters = false; + + if ( + (!flowFilters && !flowObjectFilters) || + (flowFilters && !flowObjectFilters) + ) { + onlyFlowFilters = true; + } else if (!flowFilters && flowObjectFilters) { + onlyFlowObjectFilters = true; + } else if (flowFilters && flowObjectFilters) { + bothFlowFilters = true; } - const count = countRes[0] as { count: number }; + let conditions: any = { ...cursorCondition }; + const strategy = this.determineStrategy(flowFilters, flowObjectFilters, conditions); - const flowIds: FlowId[] = flows.map((flow) => flow.id); + return await strategy.search(conditions, orderBy, limit, cursorCondition, models); - const organizationsFO: any[] = []; - const locationsFO: any[] = []; - const plansFO: any[] = []; - const usageYearsFO: any[] = []; + } - const [externalReferencesMap] = await Promise.all([ - this.externalReferenceService.getExternalReferencesForFlows( - flowIds, - models - ), - this.getFlowObjects( - flowIds, - models, - organizationsFO, - locationsFO, - plansFO, - usageYearsFO - ), - ]); + - const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( - flowIds, - models - ); + - const [ - categoriesMap, - organizationsMap, - locationsMap, - plansMap, - usageYearsMap, - reportDetailsMap, - ] = await Promise.all([ - this.categoryService.getCategoriesForFlows(flowLinksMap, models), - this.organizationService.getOrganizationsForFlows( - organizationsFO, - models - ), - this.locationService.getLocationsForFlows(locationsFO, models), - this.planService.getPlansForFlows(plansFO, models), - this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), - this.reportDetailService.getReportDetailsForFlows(flowIds, models), - ]); + private prepareFlowObjectConditions( + flowObjectFilters: FlowObjectFilters[] + ): Map> { + const flowObjectConditions = new Map>(); - const items = flows.map((flow) => { - const flowLink = flowLinksMap.get(flow.id) || []; - const categories = categoriesMap.get(flow.id) || []; - const organizations = organizationsMap.get(flow.id) || []; - const locations = [...locationsMap.get(flow.id) || []] ; - const plans = plansMap.get(flow.id) || []; - const usageYears = usageYearsMap.get(flow.id) || []; - const externalReferences = externalReferencesMap.get(flow.id) || []; - const reportDetails = reportDetailsMap.get(flow.id) || []; + for (const flowObjectFilter of flowObjectFilters || []) { + const objectType = flowObjectFilter.objectType; + const direction = flowObjectFilter.direction; + const objectID = flowObjectFilter.objectID; - const parkedParentSource: FlowParkedParentSource[] = []; - if (flow.activeStatus && flowLink.length > 0) { - this.getParketParents(flow, flowLink, models, parkedParentSource); + // Ensure the map for the objectType is initialized + if (!flowObjectConditions.has(objectType)) { + flowObjectConditions.set(objectType, new Map()); } - // TODO: change and use flow.depth to verify (depth > 0) - const childIDs: number[] = flowLinksMap - .get(flow.id) - ?.map((flowLink) => flowLink.childID.valueOf()) as number[]; - - const parentIDs: number[] = flowLinksMap - .get(flow.id) - ?.map((flowLink) => flowLink.parentID.valueOf()) as number[]; + const flowObjectCondition = flowObjectConditions.get(objectType); - return { - // Mandatory fields - id: 
flow.id.valueOf(), - versionID: flow.versionID, - amountUSD: flow.amountUSD.toString(), - createdAt: flow.createdAt.toISOString(), - updatedAt: flow.updatedAt.toISOString(), - activeStatus: flow.activeStatus, - restricted: flow.restricted, - // Optional fields - categories, - organizations, - locations, - plans, - usageYears, - childIDs, - parentIDs, - origAmount: flow.origAmount ? flow.origAmount.toString() : '', - origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', - externalReferences, - reportDetails, - parkedParentSource, - // Paged item field - cursor: flow.id.valueOf(), - }; - }); - - return { - flows: items, - hasNextPage: limit <= flows.length, - hasPreviousPage: afterCursor !== undefined, - startCursor: flows.length ? flows[0].id.valueOf() : 0, - endCursor: flows.length ? flows[flows.length - 1].id.valueOf() : 0, - pageSize: flows.length, - sortField: sortCondition.column, - sortOrder: sortCondition.order, - total: count.count, - }; - } - - private async getFlowObjects( - flowIds: FlowId[], - models: Database, - organizationsFO: any[], - locationsFO: any[], - plansFO: any[], - usageYearsFO: any[] - ): Promise { - const flowObjects = await models.flowObject.find({ - where: { - flowID: { - [Op.IN]: flowIds, - }, - }, - }); - - flowObjects.forEach((flowObject) => { - if (flowObject.objectType === 'organization') { - organizationsFO.push(flowObject); - } else if (flowObject.objectType === 'location') { - locationsFO.push(flowObject); - } else if (flowObject.objectType === 'plan') { - plansFO.push(flowObject); - } else if (flowObject.objectType === 'usageYear') { - usageYearsFO.push(flowObject); + // Ensure the map for the direction is initialized + if (!flowObjectCondition!.has(direction)) { + flowObjectCondition!.set(direction, []); } - }); - } - private async getParketParents( - flow: any, - flowLink: any[], - models: Database, - parkedParentSource: FlowParkedParentSource[] - ): Promise { - const flowLinksDepth0 = flowLink.filter((flowLink) => flowLink.depth === 0); + const flowObjectDirectionCondition = flowObjectCondition!.get(direction); - const flowLinksParent = flowLinksDepth0.filter( - (flowLink) => flowLink.parentID === flow.id - ); + // Add the objectID to the array + flowObjectDirectionCondition!.push(objectID); + } - const parentFlowIds = flowLinksParent.map((flowLink) => - flowLink.parentID.valueOf() - ); + return flowObjectConditions; + } - const categories = await models.category.find({ - where: { - group: 'flowType', - name: 'parked', - }, - }); + private prepareFlowConditions(flowFilters: SearchFlowsFilters): Map { + const flowConditions = new Map(); - const categoriesIDs = categories.map((category) => category.id); + if (flowFilters) { + Object.entries(flowFilters).forEach(([key, value]) => { + if (value !== undefined) { + flowConditions.set(key, value); + } + }); + } - const categoryRef = await models.categoryRef.find({ - where: { - categoryID: { - [Op.IN]: categoriesIDs, - }, - versionID: flow.versionID, - }, - }); + return flowConditions; + } - const parentFlows = flowLinksParent.filter((flowLink) => { - return categoryRef.some( - (categoryRef) => - categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() - ); - }); + private determineStrategy(flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], conditions: any): FlowSearchStrategy { + if ((!flowFilters && (!flowObjectFilters || flowObjectFilters.length === 0)) || (flowFilters && (!flowObjectFilters || flowObjectFilters.length === 0))) { + const flowConditions = 
this.prepareFlowConditions(flowFilters); + conditions = { ...conditions, ...flowConditions } + return this.onlyFlowFiltersStrategy; + } + // else if (!flowFilters && flowObjectFilters.length !== 0) { + // const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); + // conditions = {...conditions, ...flowObjectConditions} + // return new OnlyFlowObjectFiltersStrategy(this); + // } else if (flowFilters && flowObjectFilters.length !== 0) { + // const flowConditions = this.prepareFlowConditions(flowFilters); + // const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); + // conditions = {...conditions, ...flowConditions, ...flowObjectConditions} + // return new BothFlowFiltersStrategy(this); + // } + + throw new Error('Invalid combination of flowFilters and flowObjectFilters - temp: only provide flowFilters'); } + } diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts new file mode 100644 index 00000000..536ea3d8 --- /dev/null +++ b/src/domain-services/flows/flow-service.ts @@ -0,0 +1,24 @@ +import { Service } from 'typedi'; +import { Database } from '@unocha/hpc-api-core/src/db/type'; + +@Service() +export class FlowService { + constructor() {} + + async getFlows( + models: Database, + conditions: any, + orderBy: any, + limit: number + ) { + return await models.flow.find({ + orderBy, + limit, + where: conditions, + }); + } + + async getFlowsCount(models: Database, conditions: any) { + return await models.flow.count({ where: conditions }); + } +} diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index d55b3137..f07ffb0c 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -4,12 +4,73 @@ import { PaginationArgs } from '../../../utils/graphql/pagination'; @InputType() export class SearchFlowsFilters { + @Field({ nullable: true }) + id: number; + @Field({ nullable: true }) activeStatus: boolean; + + @Field({ nullable: true }) + status: 'commitment' | 'paid' | 'pledged'; + + @Field({ nullable: true }) + type: 'carryover' | 'parked' | 'pass_through' | 'standard'; + + @Field({ nullable: true }) + amountUSD: number; + + @Field({ nullable: true }) + reporterReferenceCode: number; + + @Field({ nullable: true }) + sourceSystemId: number; + + @Field({ nullable: true }) + legacyId: number; +} + +@InputType() +export class FlowObjectFilters { + @Field({ nullable: false }) + objectID: number; + + @Field({ nullable: false }) + direction: 'source' | 'destination'; + + @Field({ nullable: false }) + objectType: + | 'location' + | 'organization' + | 'plan' + | 'usageYear' + | 'category' + | 'project' + | 'globalCluster' + | 'emergency'; +} + +@InputType() +export class FlowCategory{ + + @Field({ nullable: false }) + id: number; + + @Field({ nullable: false }) + group: string; + } @ArgsType() export class SearchFlowsArgs extends PaginationArgs { @Field(() => SearchFlowsFilters, { nullable: true }) - filters: SearchFlowsFilters; + flowFilters: SearchFlowsFilters; + + @Field(() => [FlowObjectFilters], { nullable: true }) + flowObjectFilters: FlowObjectFilters[]; + + @Field(() => [FlowCategory], { nullable: true }) + categoryFilters: FlowCategory[]; + + @Field({ nullable: true }) + includeChildrenOfParkedFlows: boolean; } diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 394a8915..80a96a64 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ 
b/src/domain-services/flows/graphql/resolver.ts @@ -3,8 +3,7 @@ import { Service } from 'typedi'; import { Arg, Args, Ctx, Query, Resolver } from 'type-graphql'; import { FlowSearchService } from '../flow-search-service'; import Context from '../../Context'; -import { SearchFlowsFilters } from './args'; -import { PaginationArgs } from '../../../utils/graphql/pagination'; +import { SearchFlowsArgs } from './args'; @Service() @Resolver(FlowPaged) @@ -14,23 +13,9 @@ export default class FlowResolver { @Query(() => FlowSearchResult) async searchFlows( @Ctx() context: Context, - @Args(() => PaginationArgs, { validate: false }) - pagination: PaginationArgs, - @Arg('activeStatus', { nullable: true }) activeStatus: boolean + @Args(() => SearchFlowsArgs, { validate: false }) + args: SearchFlowsArgs ): Promise { - const { limit, sortOrder, sortField, afterCursor, beforeCursor } = - pagination; - const filters: SearchFlowsFilters = { - activeStatus, - }; - return await this.flowSearchService.search( - context.models, - limit, - sortOrder, - sortField, - afterCursor, - beforeCursor, - filters - ); + return await this.flowSearchService.search(context.models, args); } } diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts new file mode 100644 index 00000000..b5d844a1 --- /dev/null +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -0,0 +1,6 @@ +import { Database } from "@unocha/hpc-api-core/src/db"; +import { FlowSearchResult } from "../graphql/types"; + +export interface FlowSearchStrategy{ + search(flowConditions: Map, orderBy: any, limit: number, cursorCondition: any, models: Database): Promise; +} \ No newline at end of file diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts new file mode 100644 index 00000000..652f0843 --- /dev/null +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts @@ -0,0 +1,252 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { Ctx } from 'type-graphql'; +import { Service } from 'typedi'; +import Context from '../../../Context'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { CategoryService } from '../../../categories/category-service'; +import { ExternalReferenceService } from '../../../external-reference/external-reference-service'; +import { LocationService } from '../../../location/location-service'; +import { OrganizationService } from '../../../organizations/organization-service'; +import { PlanService } from '../../../plans/plan-service'; +import { ReportDetailService } from '../../../report-details/report-detail-service'; +import { UsageYearService } from '../../../usage-years/usage-year-service'; +import { FlowLinkService } from '../../flow-link-service'; +import { FlowService } from '../../flow-service'; +import { FlowParkedParentSource, FlowSearchResult } from '../../graphql/types'; +import { FlowSearchStrategy } from '../flow-search-strategy'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; + +@Service() +export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { + + + constructor( + private readonly organizationService: OrganizationService, + private readonly locationService: LocationService, + private readonly planService: PlanService, + private readonly usageYearService: UsageYearService, + private readonly categoryService: CategoryService, + private 
readonly flowLinkService: FlowLinkService, + private readonly externalReferenceService: ExternalReferenceService, + private readonly reportDetailService: ReportDetailService, + private readonly flowService: FlowService) { + } + + async search(flowConditions: Map, orderBy: any, limit: number, cursorCondition: any, models: Database): Promise { + + // Fetch one more item to check for hasNextPage + const limitComputed = limit + 1; + + // Build conditions object + const conditions: any = { ...cursorCondition }; + + if (flowConditions.size > 0) { + flowConditions.forEach((value, key) => { + conditions[key] = value; + }); + } + + const [flows, countRes] = await Promise.all([ + this.flowService.getFlows( + models, + conditions, + orderBy, + limitComputed + ), + this.flowService.getFlowsCount(models, conditions), + ]); + + const hasNextPage = flows.length > limit; + if (hasNextPage) { + flows.pop(); // Remove the extra item used to check hasNextPage + } + + const count = countRes[0] as { count: number }; + + const flowIds: FlowId[] = flows.map((flow) => flow.id); + + const organizationsFO: any[] = []; + const locationsFO: any[] = []; + const plansFO: any[] = []; + const usageYearsFO: any[] = []; + + const [externalReferencesMap] = await Promise.all([ + this.externalReferenceService.getExternalReferencesForFlows( + flowIds, + models + ), + this.getFlowObjects( + flowIds, + models, + organizationsFO, + locationsFO, + plansFO, + usageYearsFO + ), + ]); + + const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( + flowIds, + models + ); + + const [ + categoriesMap, + organizationsMap, + locationsMap, + plansMap, + usageYearsMap, + reportDetailsMap, + ] = await Promise.all([ + this.categoryService.getCategoriesForFlows(flowLinksMap, models), + this.organizationService.getOrganizationsForFlows( + organizationsFO, + models + ), + this.locationService.getLocationsForFlows(locationsFO, models), + this.planService.getPlansForFlows(plansFO, models), + this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), + this.reportDetailService.getReportDetailsForFlows(flowIds, models), + ]); + + const items = flows.map((flow) => { + const flowLink = flowLinksMap.get(flow.id) || []; + const categories = categoriesMap.get(flow.id) || []; + const organizations = organizationsMap.get(flow.id) || []; + const locations = [...(locationsMap.get(flow.id) || [])]; + const plans = plansMap.get(flow.id) || []; + const usageYears = usageYearsMap.get(flow.id) || []; + const externalReferences = externalReferencesMap.get(flow.id) || []; + const reportDetails = reportDetailsMap.get(flow.id) || []; + + const parkedParentSource: FlowParkedParentSource[] = []; + if (flow.activeStatus && flowLink.length > 0) { + this.getParketParents(flow, flowLink, models, parkedParentSource); + } + + const childIDs: number[] = flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.childID.valueOf()) as number[]; + + const parentIDs: number[] = flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.parentID.valueOf()) as number[]; + + return { + // Mandatory fields + id: flow.id.valueOf(), + versionID: flow.versionID, + amountUSD: flow.amountUSD.toString(), + createdAt: flow.createdAt.toISOString(), + updatedAt: flow.updatedAt.toISOString(), + activeStatus: flow.activeStatus, + restricted: flow.restricted, + // Optional fields + categories, + organizations, + 
locations, + plans, + usageYears, + childIDs, + parentIDs, + origAmount: flow.origAmount ? flow.origAmount.toString() : '', + origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', + externalReferences, + reportDetails, + parkedParentSource, + // Paged item field + cursor: flow.id.valueOf(), + }; + }); + + return { + flows: items, + hasNextPage: limit <= flows.length, + hasPreviousPage: false,// TODO: cursorCondition['id'].GT !== undefined, + startCursor: flows.length ? flows[0].id.valueOf() : 0, + endCursor: flows.length ? flows[flows.length - 1].id.valueOf() : 0, + pageSize: flows.length, + sortField: orderBy.column, + sortOrder: orderBy.order, + total: count.count, + }; + } + + private async getFlowObjects( + flowIds: FlowId[], + models: Database, + organizationsFO: any[], + locationsFO: any[], + plansFO: any[], + usageYearsFO: any[] + ): Promise { + const flowObjects = await models.flowObject.find({ + where: { + flowID: { + [Op.IN]: flowIds, + }, + }, + }); + + flowObjects.forEach((flowObject) => { + if (flowObject.objectType === 'organization') { + organizationsFO.push(flowObject); + } else if (flowObject.objectType === 'location') { + locationsFO.push(flowObject); + } else if (flowObject.objectType === 'plan') { + plansFO.push(flowObject); + } else if (flowObject.objectType === 'usageYear') { + usageYearsFO.push(flowObject); + } + }); + } + + private async getParketParents( + flow: any, + flowLink: any[], + models: Database, + parkedParentSource: FlowParkedParentSource[] + ): Promise { + const flowLinksDepth0 = flowLink.filter((flowLink) => flowLink.depth === 0); + + const flowLinksParent = flowLinksDepth0.filter( + (flowLink) => flowLink.parentID === flow.id + ); + + const parentFlowIds = flowLinksParent.map((flowLink) => + flowLink.parentID.valueOf() + ); + + const categories = await models.category.find({ + where: { + group: 'flowType', + name: 'parked', + }, + }); + + const categoriesIDs = categories.map((category) => category.id); + + const categoryRef = await models.categoryRef.find({ + where: { + categoryID: { + [Op.IN]: categoriesIDs, + }, + versionID: flow.versionID, + }, + }); + + const parentFlows = flowLinksParent.filter((flowLink) => { + return categoryRef.some( + (categoryRef) => + categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() + ); + }); + } + +} \ No newline at end of file diff --git a/src/domain-services/organizations/graphql/types.ts b/src/domain-services/organizations/graphql/types.ts index e4a52057..c4ebdb23 100644 --- a/src/domain-services/organizations/graphql/types.ts +++ b/src/domain-services/organizations/graphql/types.ts @@ -11,4 +11,7 @@ export class Organization extends BaseType { @Field({ nullable: true }) name: string; + + @Field({ nullable: true }) + abbreviation: string; } diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index 1bac9e1d..a35e6637 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -54,6 +54,7 @@ export class OrganizationService { name: organization.name, createdAt: organization.createdAt.toISOString(), updatedAt: organization.updatedAt.toISOString(), + abbreviation: organization.abbreviation, }; } } From c97a65be856068db8194adbca2e7a15588d185a3 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Sat, 11 Nov 2023 18:45:00 +0100 Subject: [PATCH 21/67] Minor types refactor. Add test for flowSeach service. 
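As a quick illustration of what the new spec exercises (a sketch only; the field
values are made up, and prepareFlowConditions is made non-private in this patch so
the test can call it directly):

    // prepareFlowConditions turns the GraphQL filter input into a plain
    // conditions object, dropping undefined fields but keeping falsy ones (0, false).
    const service = Container.get(FlowSearchService);
    const conditions = service.prepareFlowConditions(
      Object.assign(new SearchFlowsFilters(), { id: 1, activeStatus: true })
    );
    // conditions -> { id: 1, activeStatus: true }
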
--- .../flows/flow-link-service.ts | 5 +- .../flows/flow-search-service.ts | 74 ++++--- src/domain-services/flows/graphql/args.ts | 4 +- src/domain-services/flows/graphql/types.ts | 2 +- .../flows/strategy/flow-search-strategy.ts | 16 +- .../impl/only-flow-conditions-strategy.ts | 184 +++++++++--------- src/domain-services/plans/plan-service.ts | 16 +- tests/unit/flow-search-service.spec.ts | 73 +++++++ 8 files changed, 226 insertions(+), 148 deletions(-) create mode 100644 tests/unit/flow-search-service.spec.ts diff --git a/src/domain-services/flows/flow-link-service.ts b/src/domain-services/flows/flow-link-service.ts index eac0e356..8252340c 100644 --- a/src/domain-services/flows/flow-link-service.ts +++ b/src/domain-services/flows/flow-link-service.ts @@ -19,7 +19,10 @@ export class FlowLinkService { }); // Group flowLinks by flow ID for easy mapping - const flowLinksMap = new Map[]>(); + const flowLinksMap = new Map< + number, + InstanceOfModel[] + >(); // Populate the map with flowLinks for each flow flowLinks.forEach((flowLink) => { diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index e38ca553..c7e6aa7d 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -7,9 +7,7 @@ import { SearchFlowsArgs, SearchFlowsFilters, } from './graphql/args'; -import { - FlowSearchResult -} from './graphql/types'; +import { FlowSearchResult } from './graphql/types'; import { FlowSearchStrategy } from './strategy/flow-search-strategy'; import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy'; @@ -17,7 +15,7 @@ import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-st export class FlowSearchService { constructor( private readonly onlyFlowFiltersStrategy: OnlyFlowFiltersStrategy - ) { } + ) {} async search( models: Database, @@ -45,39 +43,28 @@ export class FlowSearchService { } const orderBy = { - column: sortField??'updatedAt', + column: sortField ?? 'updatedAt', order: sortOrder ?? 
'desc', }; const { flowFilters, flowObjectFilters } = filters; - let onlyFlowFilters = false; - let onlyFlowObjectFilters = false; - let bothFlowFilters = false; - - if ( - (!flowFilters && !flowObjectFilters) || - (flowFilters && !flowObjectFilters) - ) { - onlyFlowFilters = true; - } else if (!flowFilters && flowObjectFilters) { - onlyFlowObjectFilters = true; - } else if (flowFilters && flowObjectFilters) { - bothFlowFilters = true; - } - - let conditions: any = { ...cursorCondition }; - const strategy = this.determineStrategy(flowFilters, flowObjectFilters, conditions); - - return await strategy.search(conditions, orderBy, limit, cursorCondition, models); - + const { strategy, conditions } = this.determineStrategy( + flowFilters, + flowObjectFilters, + cursorCondition + ); + + return await strategy.search( + conditions, + orderBy, + limit, + cursorCondition, + models + ); } - - - - - private prepareFlowObjectConditions( + prepareFlowObjectConditions( flowObjectFilters: FlowObjectFilters[] ): Map> { const flowObjectConditions = new Map>(); @@ -108,13 +95,13 @@ export class FlowSearchService { return flowObjectConditions; } - private prepareFlowConditions(flowFilters: SearchFlowsFilters): Map { - const flowConditions = new Map(); + prepareFlowConditions(flowFilters: SearchFlowsFilters): any { + let flowConditions = {}; if (flowFilters) { Object.entries(flowFilters).forEach(([key, value]) => { if (value !== undefined) { - flowConditions.set(key, value); + flowConditions = { ...flowConditions, [key]: value }; } }); } @@ -122,11 +109,19 @@ export class FlowSearchService { return flowConditions; } - private determineStrategy(flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], conditions: any): FlowSearchStrategy { - if ((!flowFilters && (!flowObjectFilters || flowObjectFilters.length === 0)) || (flowFilters && (!flowObjectFilters || flowObjectFilters.length === 0))) { + determineStrategy( + flowFilters: SearchFlowsFilters, + flowObjectFilters: FlowObjectFilters[], + conditions: any + ): { strategy: FlowSearchStrategy; conditions: any } { + if ( + (!flowFilters && + (!flowObjectFilters || flowObjectFilters.length === 0)) || + (flowFilters && (!flowObjectFilters || flowObjectFilters.length === 0)) + ) { const flowConditions = this.prepareFlowConditions(flowFilters); - conditions = { ...conditions, ...flowConditions } - return this.onlyFlowFiltersStrategy; + conditions = { ...conditions, ...flowConditions }; + return { strategy: this.onlyFlowFiltersStrategy, conditions }; } // else if (!flowFilters && flowObjectFilters.length !== 0) { // const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); @@ -139,7 +134,8 @@ export class FlowSearchService { // return new BothFlowFiltersStrategy(this); // } - throw new Error('Invalid combination of flowFilters and flowObjectFilters - temp: only provide flowFilters'); + throw new Error( + 'Invalid combination of flowFilters and flowObjectFilters - temp: only provide flowFilters' + ); } - } diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index f07ffb0c..155aa67f 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -50,14 +50,12 @@ export class FlowObjectFilters { } @InputType() -export class FlowCategory{ - +export class FlowCategory { @Field({ nullable: false }) id: number; @Field({ nullable: false }) group: string; - } @ArgsType() diff --git a/src/domain-services/flows/graphql/types.ts 
b/src/domain-services/flows/graphql/types.ts index b36c0113..50fb8877 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -95,7 +95,7 @@ export class Flow extends BaseFlow { reportDetails: ReportDetail[]; @Field(() => [FlowParkedParentSource], { nullable: true }) - parkedParentSource: FlowParkedParentSource[]; + parkedParentSource: FlowParkedParentSource[] | null; } @ObjectType() diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index b5d844a1..a0626758 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -1,6 +1,12 @@ -import { Database } from "@unocha/hpc-api-core/src/db"; -import { FlowSearchResult } from "../graphql/types"; +import { Database } from '@unocha/hpc-api-core/src/db'; +import { FlowSearchResult } from '../graphql/types'; -export interface FlowSearchStrategy{ - search(flowConditions: Map, orderBy: any, limit: number, cursorCondition: any, models: Database): Promise; -} \ No newline at end of file +export interface FlowSearchStrategy { + search( + flowConditions: Map, + orderBy: any, + limit: number, + cursorCondition: any, + models: Database + ): Promise; +} diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts index 652f0843..84307446 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts @@ -1,7 +1,5 @@ import { Database } from '@unocha/hpc-api-core/src/db'; -import { Ctx } from 'type-graphql'; import { Service } from 'typedi'; -import Context from '../../../Context'; import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { CategoryService } from '../../../categories/category-service'; import { ExternalReferenceService } from '../../../external-reference/external-reference-service'; @@ -18,8 +16,6 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; @Service() export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { - - constructor( private readonly organizationService: OrganizationService, private readonly locationService: LocationService, @@ -29,30 +25,25 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { private readonly flowLinkService: FlowLinkService, private readonly externalReferenceService: ExternalReferenceService, private readonly reportDetailService: ReportDetailService, - private readonly flowService: FlowService) { - } - - async search(flowConditions: Map, orderBy: any, limit: number, cursorCondition: any, models: Database): Promise { - - // Fetch one more item to check for hasNextPage + private readonly flowService: FlowService + ) {} + + async search( + flowConditions: Map, + orderBy: any, + limit: number, + cursorCondition: any, + models: Database + ): Promise { + // Fetch one more item to check for hasNextPage const limitComputed = limit + 1; // Build conditions object - const conditions: any = { ...cursorCondition }; - - if (flowConditions.size > 0) { - flowConditions.forEach((value, key) => { - conditions[key] = value; - }); - } + const conditions: any = { ...cursorCondition, ...flowConditions }; + console.log('conditions in OnlyFlowFiltersStrategy', conditions); const [flows, countRes] = await Promise.all([ - this.flowService.getFlows( - models, - 
conditions, - orderBy, - limitComputed - ), + this.flowService.getFlows(models, conditions, orderBy, limitComputed), this.flowService.getFlowsCount(models, conditions), ]); @@ -109,66 +100,73 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { this.reportDetailService.getReportDetailsForFlows(flowIds, models), ]); - const items = flows.map((flow) => { - const flowLink = flowLinksMap.get(flow.id) || []; - const categories = categoriesMap.get(flow.id) || []; - const organizations = organizationsMap.get(flow.id) || []; - const locations = [...(locationsMap.get(flow.id) || [])]; - const plans = plansMap.get(flow.id) || []; - const usageYears = usageYearsMap.get(flow.id) || []; - const externalReferences = externalReferencesMap.get(flow.id) || []; - const reportDetails = reportDetailsMap.get(flow.id) || []; - - const parkedParentSource: FlowParkedParentSource[] = []; - if (flow.activeStatus && flowLink.length > 0) { - this.getParketParents(flow, flowLink, models, parkedParentSource); - } - - const childIDs: number[] = flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.childID.valueOf()) as number[]; - - const parentIDs: number[] = flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.parentID.valueOf()) as number[]; - - return { - // Mandatory fields - id: flow.id.valueOf(), - versionID: flow.versionID, - amountUSD: flow.amountUSD.toString(), - createdAt: flow.createdAt.toISOString(), - updatedAt: flow.updatedAt.toISOString(), - activeStatus: flow.activeStatus, - restricted: flow.restricted, - // Optional fields - categories, - organizations, - locations, - plans, - usageYears, - childIDs, - parentIDs, - origAmount: flow.origAmount ? flow.origAmount.toString() : '', - origCurrency: flow.origCurrency ? 
flow.origCurrency.toString() : '', - externalReferences, - reportDetails, - parkedParentSource, - // Paged item field - cursor: flow.id.valueOf(), - }; - }); + const items = await Promise.all( + flows.map(async (flow) => { + const flowLink = flowLinksMap.get(flow.id) || []; + const categories = categoriesMap.get(flow.id) || []; + const organizations = organizationsMap.get(flow.id) || []; + const locations = [...(locationsMap.get(flow.id) || [])]; + const plans = plansMap.get(flow.id) || []; + const usageYears = usageYearsMap.get(flow.id) || []; + const externalReferences = externalReferencesMap.get(flow.id) || []; + const reportDetails = reportDetailsMap.get(flow.id) || []; + + let parkedParentSource: FlowParkedParentSource[] = []; + if (flow.activeStatus && flowLink.length > 0) { + parkedParentSource = await this.getParketParents( + flow, + flowLink, + models + ); + } + + const childIDs: number[] = flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.childID.valueOf()) as number[]; + + const parentIDs: number[] = flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.parentID.valueOf()) as number[]; + + return { + // Mandatory fields + id: flow.id.valueOf(), + versionID: flow.versionID, + amountUSD: flow.amountUSD.toString(), + createdAt: flow.createdAt.toISOString(), + updatedAt: flow.updatedAt.toISOString(), + activeStatus: flow.activeStatus, + restricted: flow.restricted, + // Optional fields + categories, + organizations, + locations, + plans, + usageYears, + childIDs, + parentIDs, + origAmount: flow.origAmount ? flow.origAmount.toString() : '', + origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', + externalReferences, + reportDetails, + parkedParentSource: + parkedParentSource.length > 0 ? parkedParentSource : null, + // Paged item field + cursor: flow.id.valueOf(), + }; + }) + ); return { flows: items, hasNextPage: limit <= flows.length, - hasPreviousPage: false,// TODO: cursorCondition['id'].GT !== undefined, + hasPreviousPage: false, // TODO: cursorCondition['id'].GT !== undefined, startCursor: flows.length ? flows[0].id.valueOf() : 0, endCursor: flows.length ? 
flows[flows.length - 1].id.valueOf() : 0, pageSize: flows.length, @@ -210,8 +208,7 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { private async getParketParents( flow: any, flowLink: any[], - models: Database, - parkedParentSource: FlowParkedParentSource[] + models: Database ): Promise { const flowLinksDepth0 = flowLink.filter((flowLink) => flowLink.depth === 0); @@ -219,10 +216,6 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { (flowLink) => flowLink.parentID === flow.id ); - const parentFlowIds = flowLinksParent.map((flowLink) => - flowLink.parentID.valueOf() - ); - const categories = await models.category.find({ where: { group: 'flowType', @@ -241,12 +234,17 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { }, }); - const parentFlows = flowLinksParent.filter((flowLink) => { - return categoryRef.some( - (categoryRef) => - categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() - ); - }); - } + const parentFlows = flowLinksParent + .filter((flowLink) => { + return categoryRef.some( + (categoryRef) => + categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() + ); + }) + .map((flowLink) => { + return flowLink.parentID.valueOf(); + }); -} \ No newline at end of file + return parentFlows; + } +} diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index 09509563..63a97d2e 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -74,15 +74,18 @@ export class PlanService { }, }); - const planFlowOobject = plansFO.find( (planFO) => planFO.objectID === plan.id - ); + ); const flowId = planFlowOobject && planFlowOobject.flowID; - - const planMapped = this.mapPlansToFlowPlans(plan, planVersion[0], planFlowOobject?.refDirection || null); - + + const planMapped = this.mapPlansToFlowPlans( + plan, + planVersion[0], + planFlowOobject?.refDirection || null + ); + if (flowId) { if (!plansMap.has(flowId)) { plansMap.set(flowId, []); @@ -98,7 +101,8 @@ export class PlanService { private mapPlansToFlowPlans( plan: InstanceDataOfModel, planVersion: InstanceDataOfModel, - direction: string | null): BasePlan { + direction: string | null + ): BasePlan { return { id: plan.id.valueOf(), name: planVersion.name, diff --git a/tests/unit/flow-search-service.spec.ts b/tests/unit/flow-search-service.spec.ts new file mode 100644 index 00000000..3e759b1a --- /dev/null +++ b/tests/unit/flow-search-service.spec.ts @@ -0,0 +1,73 @@ +import Container from 'typedi'; +import { FlowSearchService } from '../../src/domain-services/flows/flow-search-service'; +import { SearchFlowsFilters } from '../../src/domain-services/flows/graphql/args'; + +describe('PrepareFlowConditions', () => { + let flowSearchService: FlowSearchService; + + beforeEach(() => { + // Initialize your class instance if needed + flowSearchService = Container.get(FlowSearchService); + }); + + it('should prepare flow conditions with valid filters', () => { + const flowFilters = new SearchFlowsFilters(); + flowFilters.id = 1; + flowFilters.activeStatus = true; + flowFilters.status = 'commitment'; + flowFilters.type = 'carryover'; + flowFilters.amountUSD = 1000; + flowFilters.reporterReferenceCode = 123; + flowFilters.sourceSystemId = 456; + flowFilters.legacyId = 789; + + const result = flowSearchService.prepareFlowConditions(flowFilters); + + expect(result).toEqual({ + id: 1, + activeStatus: true, + status: 'commitment', + type: 'carryover', + amountUSD: 1000, + reporterReferenceCode: 
123, + sourceSystemId: 456, + legacyId: 789, + }); + }); + + it('should prepare flow conditions with some filters set to undefined', () => { + const flowFilters = new SearchFlowsFilters(); + flowFilters.id = 1; + flowFilters.activeStatus = true; + + const result = flowSearchService.prepareFlowConditions(flowFilters); + + expect(result).toEqual({ + id: 1, + activeStatus: true, + }); + }); + + it('should prepare flow conditions with all filters set to undefined', () => { + const flowFilters = new SearchFlowsFilters(); + + const result = flowSearchService.prepareFlowConditions(flowFilters); + + expect(result).toEqual({}); + }); + + it('should prepare flow conditions with some filters having falsy values', () => { + const flowFilters = new SearchFlowsFilters(); + flowFilters.id = 0; + flowFilters.activeStatus = false; + flowFilters.amountUSD = 0; + + const result = flowSearchService.prepareFlowConditions(flowFilters); + + expect(result).toEqual({ + id: 0, + activeStatus: false, + amountUSD: 0, + }); + }); +}); From 29ca26fe1c3d15f6627fc3e955247c6a66fcdfba Mon Sep 17 00:00:00 2001 From: manelcecs Date: Sun, 12 Nov 2023 14:20:37 +0100 Subject: [PATCH 22/67] Add FlowObject Strategy. --- .../flow-object/flow-object-service.ts | 17 + .../flows/flow-search-service.ts | 317 ++++++++++++++++-- .../flows/strategy/flow-search-strategy.ts | 9 +- .../impl/flow-object-conditions-strategy.ts | 98 ++++++ .../impl/only-flow-conditions-strategy.ts | 237 +------------ tests/unit/flow-search-service.spec.ts | 138 +++++--- 6 files changed, 504 insertions(+), 312 deletions(-) create mode 100644 src/domain-services/flow-object/flow-object-service.ts create mode 100644 src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts diff --git a/src/domain-services/flow-object/flow-object-service.ts b/src/domain-services/flow-object/flow-object-service.ts new file mode 100644 index 00000000..c7c23be3 --- /dev/null +++ b/src/domain-services/flow-object/flow-object-service.ts @@ -0,0 +1,17 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Service } from 'typedi'; + +@Service() +export class FlowObjectService { + async getFlowIdsFromFlowObjects( + models: Database, + where: any + ): Promise { + const flowObjects = await models.flowObject.find({ + where, + }); + // Keep only not duplicated flowIDs + return [...new Set(flowObjects.map((flowObject) => flowObject.flowID))]; + } +} diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index c7e6aa7d..5233d939 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -7,14 +7,33 @@ import { SearchFlowsArgs, SearchFlowsFilters, } from './graphql/args'; -import { FlowSearchResult } from './graphql/types'; +import { FlowParkedParentSource, FlowSearchResult } from './graphql/types'; import { FlowSearchStrategy } from './strategy/flow-search-strategy'; import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy'; +import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { CategoryService } from '../categories/category-service'; +import { ExternalReferenceService } from '../external-reference/external-reference-service'; +import { LocationService } from '../location/location-service'; +import { OrganizationService } from 
'../organizations/organization-service'; +import { PlanService } from '../plans/plan-service'; +import { ReportDetailService } from '../report-details/report-detail-service'; +import { UsageYearService } from '../usage-years/usage-year-service'; +import { FlowLinkService } from './flow-link-service'; @Service() export class FlowSearchService { constructor( - private readonly onlyFlowFiltersStrategy: OnlyFlowFiltersStrategy + private readonly onlyFlowFiltersStrategy: OnlyFlowFiltersStrategy, + private readonly flowObjectFiltersStrategy: FlowObjectFiltersStrategy, + private readonly organizationService: OrganizationService, + private readonly locationService: LocationService, + private readonly planService: PlanService, + private readonly usageYearService: UsageYearService, + private readonly categoryService: CategoryService, + private readonly flowLinkService: FlowLinkService, + private readonly externalReferenceService: ExternalReferenceService, + private readonly reportDetailService: ReportDetailService ) {} async search( @@ -55,44 +74,143 @@ export class FlowSearchService { cursorCondition ); - return await strategy.search( + // Fetch one more item to check for hasNextPage + const limitComputed = limit + 1; + + const { flows, count } = await strategy.search( conditions, orderBy, - limit, + limitComputed, cursorCondition, models ); - } - prepareFlowObjectConditions( - flowObjectFilters: FlowObjectFilters[] - ): Map> { - const flowObjectConditions = new Map>(); + // Remove the extra item used to check hasNextPage + const hasNextPage = flows.length > limit; + if (hasNextPage) { + flows.pop(); + } - for (const flowObjectFilter of flowObjectFilters || []) { - const objectType = flowObjectFilter.objectType; - const direction = flowObjectFilter.direction; - const objectID = flowObjectFilter.objectID; + const flowIds: FlowId[] = flows.map((flow) => flow.id); - // Ensure the map for the objectType is initialized - if (!flowObjectConditions.has(objectType)) { - flowObjectConditions.set(objectType, new Map()); - } + const organizationsFO: any[] = []; + const locationsFO: any[] = []; + const plansFO: any[] = []; + const usageYearsFO: any[] = []; - const flowObjectCondition = flowObjectConditions.get(objectType); + const [externalReferencesMap] = await Promise.all([ + this.externalReferenceService.getExternalReferencesForFlows( + flowIds, + models + ), + this.getFlowObjects( + flowIds, + models, + organizationsFO, + locationsFO, + plansFO, + usageYearsFO + ), + ]); - // Ensure the map for the direction is initialized - if (!flowObjectCondition!.has(direction)) { - flowObjectCondition!.set(direction, []); - } + const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( + flowIds, + models + ); - const flowObjectDirectionCondition = flowObjectCondition!.get(direction); + const [ + categoriesMap, + organizationsMap, + locationsMap, + plansMap, + usageYearsMap, + reportDetailsMap, + ] = await Promise.all([ + this.categoryService.getCategoriesForFlows(flowLinksMap, models), + this.organizationService.getOrganizationsForFlows( + organizationsFO, + models + ), + this.locationService.getLocationsForFlows(locationsFO, models), + this.planService.getPlansForFlows(plansFO, models), + this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), + this.reportDetailService.getReportDetailsForFlows(flowIds, models), + ]); - // Add the objectID to the array - flowObjectDirectionCondition!.push(objectID); - } + const items = await Promise.all( + flows.map(async (flow) => { + const flowLink = 
flowLinksMap.get(flow.id) || []; + const categories = categoriesMap.get(flow.id) || []; + const organizations = organizationsMap.get(flow.id) || []; + const locations = [...(locationsMap.get(flow.id) || [])]; + const plans = plansMap.get(flow.id) || []; + const usageYears = usageYearsMap.get(flow.id) || []; + const externalReferences = externalReferencesMap.get(flow.id) || []; + const reportDetails = reportDetailsMap.get(flow.id) || []; + + let parkedParentSource: FlowParkedParentSource[] = []; + if (flow.activeStatus && flowLink.length > 0) { + parkedParentSource = await this.getParketParents( + flow, + flowLink, + models + ); + } + + const childIDs: number[] = flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.childID.valueOf()) as number[]; + + const parentIDs: number[] = flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.parentID.valueOf()) as number[]; - return flowObjectConditions; + return { + // Mandatory fields + id: flow.id.valueOf(), + versionID: flow.versionID, + amountUSD: flow.amountUSD.toString(), + createdAt: flow.createdAt.toISOString(), + updatedAt: flow.updatedAt.toISOString(), + activeStatus: flow.activeStatus, + restricted: flow.restricted, + // Optional fields + categories, + organizations, + locations, + plans, + usageYears, + childIDs, + parentIDs, + origAmount: flow.origAmount ? flow.origAmount.toString() : '', + origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', + externalReferences, + reportDetails, + parkedParentSource: + parkedParentSource.length > 0 ? parkedParentSource : null, + // Paged item field + cursor: flow.id.valueOf(), + }; + }) + ); + + return { + flows: items, + hasNextPage: limit <= flows.length, + hasPreviousPage: afterCursor !== undefined, + startCursor: flows.length ? flows[0].id.valueOf() : 0, + endCursor: flows.length ? 
flows[flows.length - 1].id.valueOf() : 0, + pageSize: flows.length, + sortField: orderBy.column, + sortOrder: orderBy.order, + total: count, + }; } prepareFlowConditions(flowFilters: SearchFlowsFilters): any { @@ -109,6 +227,37 @@ export class FlowSearchService { return flowConditions; } + prepareFlowObjectConditions( + flowObjectFilters: FlowObjectFilters[] + ): Map> { + const flowObjectsConditions: Map> = new Map(); + + flowObjectFilters.forEach((flowObjectFilter) => { + const { objectType, direction, objectID } = flowObjectFilter; + + if (!flowObjectsConditions.has(objectType)) { + flowObjectsConditions.set(objectType, new Map()); + } + + const refDirectionMap = flowObjectsConditions.get(objectType); + if (!refDirectionMap!.has(direction)) { + refDirectionMap!.set(direction, []); + } + + const objectIDsArray = refDirectionMap!.get(direction); + + if (objectIDsArray!.includes(objectID)) { + throw new Error( + `Duplicate flow object filter: ${objectType} ${direction} ${objectID}` + ); + } + + objectIDsArray!.push(objectID); + }); + + return flowObjectsConditions; + } + determineStrategy( flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], @@ -122,20 +271,114 @@ export class FlowSearchService { const flowConditions = this.prepareFlowConditions(flowFilters); conditions = { ...conditions, ...flowConditions }; return { strategy: this.onlyFlowFiltersStrategy, conditions }; + } else if (!flowFilters && flowObjectFilters.length !== 0) { + const flowObjectConditions = + this.prepareFlowObjectConditions(flowObjectFilters); + conditions = { ...conditions, ...flowObjectConditions }; + + return { + strategy: this.flowObjectFiltersStrategy, + conditions: this.buildConditionsMap(undefined, flowObjectConditions), + }; + } else if (flowFilters && flowObjectFilters.length !== 0) { + const flowConditions = this.prepareFlowConditions(flowFilters); + const flowObjectConditions = + this.prepareFlowObjectConditions(flowObjectFilters); + conditions = { + ...conditions, + ...flowConditions, + ...flowObjectConditions, + }; + + return { + strategy: this.flowObjectFiltersStrategy, + conditions: this.buildConditionsMap( + flowConditions, + flowObjectConditions + ), + }; } - // else if (!flowFilters && flowObjectFilters.length !== 0) { - // const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); - // conditions = {...conditions, ...flowObjectConditions} - // return new OnlyFlowObjectFiltersStrategy(this); - // } else if (flowFilters && flowObjectFilters.length !== 0) { - // const flowConditions = this.prepareFlowConditions(flowFilters); - // const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); - // conditions = {...conditions, ...flowConditions, ...flowObjectConditions} - // return new BothFlowFiltersStrategy(this); - // } throw new Error( 'Invalid combination of flowFilters and flowObjectFilters - temp: only provide flowFilters' ); } + + private buildConditionsMap(flowConditions: any, flowObjectConditions: any) { + const conditionsMap = new Map(); + conditionsMap.set('flowObjects', flowObjectConditions); + conditionsMap.set('flow', flowConditions); + return conditionsMap; + } + private async getFlowObjects( + flowIds: FlowId[], + models: Database, + organizationsFO: any[], + locationsFO: any[], + plansFO: any[], + usageYearsFO: any[] + ): Promise { + const flowObjects = await models.flowObject.find({ + where: { + flowID: { + [Op.IN]: flowIds, + }, + }, + }); + + flowObjects.forEach((flowObject) => { + if (flowObject.objectType === 
'organization') { + organizationsFO.push(flowObject); + } else if (flowObject.objectType === 'location') { + locationsFO.push(flowObject); + } else if (flowObject.objectType === 'plan') { + plansFO.push(flowObject); + } else if (flowObject.objectType === 'usageYear') { + usageYearsFO.push(flowObject); + } + }); + } + + private async getParketParents( + flow: any, + flowLink: any[], + models: Database + ): Promise { + const flowLinksDepth0 = flowLink.filter((flowLink) => flowLink.depth === 0); + + const flowLinksParent = flowLinksDepth0.filter( + (flowLink) => flowLink.parentID === flow.id + ); + + const categories = await models.category.find({ + where: { + group: 'flowType', + name: 'parked', + }, + }); + + const categoriesIDs = categories.map((category) => category.id); + + const categoryRef = await models.categoryRef.find({ + where: { + categoryID: { + [Op.IN]: categoriesIDs, + }, + versionID: flow.versionID, + }, + }); + + const parentFlows = flowLinksParent + .filter((flowLink) => { + return categoryRef.some( + (categoryRef) => + categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() + ); + }) + .map((flowLink) => { + return flowLink.parentID.valueOf(); + }); + + return parentFlows; + } } diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index a0626758..76f8b6a3 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -1,5 +1,12 @@ import { Database } from '@unocha/hpc-api-core/src/db'; import { FlowSearchResult } from '../graphql/types'; +import { InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; +import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; + +export interface FlowSearchStrategyResponse { + flows: InstanceDataOfModel[]; + count: number; +} export interface FlowSearchStrategy { search( @@ -8,5 +15,5 @@ export interface FlowSearchStrategy { limit: number, cursorCondition: any, models: Database - ): Promise; + ): Promise; } diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts new file mode 100644 index 00000000..f44020bd --- /dev/null +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts @@ -0,0 +1,98 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Service } from 'typedi'; +import { FlowObjectService } from '../../../flow-object/flow-object-service'; +import { FlowService } from '../../flow-service'; +import { + FlowSearchStrategy, + FlowSearchStrategyResponse, +} from '../flow-search-strategy'; + +@Service() +export class FlowObjectFiltersStrategy implements FlowSearchStrategy { + constructor( + private readonly flowService: FlowService, + private readonly flowObjectService: FlowObjectService + ) {} + + async search( + flowConditions: Map, + orderBy: any, + limit: number, + cursorCondition: any, + models: Database + ): Promise { + const flowObjectsConditions: Map< + string, + Map + > = flowConditions.get('flowObjects'); + const flowEntityConditions = flowConditions.get('flow'); + + const flowObjectWhere = this.mapFlowObjectConditionsToWhereClause( + flowObjectsConditions + ); + + // Obtain flowIDs based on provided flowObject conditions + const flowIDsFromFilteredFlowObjects = + await this.flowObjectService.getFlowIdsFromFlowObjects( 
+ models, + flowObjectWhere + ); + + // Combine conditions from flowObjects FlowIDs and flow conditions + const mergedFlowConditions = { + ...flowEntityConditions, + flowID: { + [Op.IN]: flowIDsFromFilteredFlowObjects, + }, + }; + + const conditions = { ...cursorCondition, ...mergedFlowConditions }; + + // Obtain flows and flowCount based on flowIDs from filtered flowObjects + // and flow conditions + const [flows, countRes] = await Promise.all([ + this.flowService.getFlows(models, conditions, orderBy, limit), + this.flowService.getFlowsCount(models, conditions), + ]); + + // Map count result query to count object + const countObject = countRes[0] as { count: number }; + + return { flows, count: countObject.count }; + } + + /* + * Map structure: + * { + * KEY = objectType: string, + * VALUE = { + * KEY = refDirection: string, + * VALUE = [objectID: number] + * } + * } + */ + private mapFlowObjectConditionsToWhereClause( + flowObjectConditions: Map> + ): any { + let flowObjectWhere: any = {}; + for (const [objectType, refDirectionMap] of flowObjectConditions) { + for (const [refDirection, objectIDs] of refDirectionMap) { + flowObjectWhere = { + ...flowObjectWhere, + objectID: { + [Op.IN]: objectIDs, + }, + refDirection: { + [Op.IN]: refDirection, + }, + objectType: { + [Op.IN]: objectType, + }, + }; + } + } + + return flowObjectWhere; + } +} diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts index 84307446..12f97057 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts @@ -1,250 +1,35 @@ import { Database } from '@unocha/hpc-api-core/src/db'; import { Service } from 'typedi'; import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { CategoryService } from '../../../categories/category-service'; -import { ExternalReferenceService } from '../../../external-reference/external-reference-service'; -import { LocationService } from '../../../location/location-service'; -import { OrganizationService } from '../../../organizations/organization-service'; -import { PlanService } from '../../../plans/plan-service'; -import { ReportDetailService } from '../../../report-details/report-detail-service'; -import { UsageYearService } from '../../../usage-years/usage-year-service'; -import { FlowLinkService } from '../../flow-link-service'; import { FlowService } from '../../flow-service'; -import { FlowParkedParentSource, FlowSearchResult } from '../../graphql/types'; -import { FlowSearchStrategy } from '../flow-search-strategy'; +import { + FlowSearchStrategy, + FlowSearchStrategyResponse, +} from '../flow-search-strategy'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; @Service() export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { - constructor( - private readonly organizationService: OrganizationService, - private readonly locationService: LocationService, - private readonly planService: PlanService, - private readonly usageYearService: UsageYearService, - private readonly categoryService: CategoryService, - private readonly flowLinkService: FlowLinkService, - private readonly externalReferenceService: ExternalReferenceService, - private readonly reportDetailService: ReportDetailService, - private readonly flowService: FlowService - ) {} + constructor(private readonly flowService: FlowService) {} async search( - flowConditions: Map, + 
flowConditions: any, orderBy: any, limit: number, cursorCondition: any, models: Database - ): Promise { - // Fetch one more item to check for hasNextPage - const limitComputed = limit + 1; - + ): Promise { // Build conditions object const conditions: any = { ...cursorCondition, ...flowConditions }; - console.log('conditions in OnlyFlowFiltersStrategy', conditions); const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, conditions, orderBy, limitComputed), + this.flowService.getFlows(models, conditions, orderBy, limit), this.flowService.getFlowsCount(models, conditions), ]); - const hasNextPage = flows.length > limit; - if (hasNextPage) { - flows.pop(); // Remove the extra item used to check hasNextPage - } - - const count = countRes[0] as { count: number }; - - const flowIds: FlowId[] = flows.map((flow) => flow.id); - - const organizationsFO: any[] = []; - const locationsFO: any[] = []; - const plansFO: any[] = []; - const usageYearsFO: any[] = []; - - const [externalReferencesMap] = await Promise.all([ - this.externalReferenceService.getExternalReferencesForFlows( - flowIds, - models - ), - this.getFlowObjects( - flowIds, - models, - organizationsFO, - locationsFO, - plansFO, - usageYearsFO - ), - ]); - - const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( - flowIds, - models - ); - - const [ - categoriesMap, - organizationsMap, - locationsMap, - plansMap, - usageYearsMap, - reportDetailsMap, - ] = await Promise.all([ - this.categoryService.getCategoriesForFlows(flowLinksMap, models), - this.organizationService.getOrganizationsForFlows( - organizationsFO, - models - ), - this.locationService.getLocationsForFlows(locationsFO, models), - this.planService.getPlansForFlows(plansFO, models), - this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), - this.reportDetailService.getReportDetailsForFlows(flowIds, models), - ]); - - const items = await Promise.all( - flows.map(async (flow) => { - const flowLink = flowLinksMap.get(flow.id) || []; - const categories = categoriesMap.get(flow.id) || []; - const organizations = organizationsMap.get(flow.id) || []; - const locations = [...(locationsMap.get(flow.id) || [])]; - const plans = plansMap.get(flow.id) || []; - const usageYears = usageYearsMap.get(flow.id) || []; - const externalReferences = externalReferencesMap.get(flow.id) || []; - const reportDetails = reportDetailsMap.get(flow.id) || []; - - let parkedParentSource: FlowParkedParentSource[] = []; - if (flow.activeStatus && flowLink.length > 0) { - parkedParentSource = await this.getParketParents( - flow, - flowLink, - models - ); - } - - const childIDs: number[] = flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.childID.valueOf()) as number[]; - - const parentIDs: number[] = flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.parentID.valueOf()) as number[]; - - return { - // Mandatory fields - id: flow.id.valueOf(), - versionID: flow.versionID, - amountUSD: flow.amountUSD.toString(), - createdAt: flow.createdAt.toISOString(), - updatedAt: flow.updatedAt.toISOString(), - activeStatus: flow.activeStatus, - restricted: flow.restricted, - // Optional fields - categories, - organizations, - locations, - plans, - usageYears, - childIDs, - parentIDs, - origAmount: flow.origAmount ? flow.origAmount.toString() : '', - origCurrency: flow.origCurrency ? 
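The pagination trick being removed here (and reinstated at the service level later in this series) is to fetch one row more than the requested page size: if the extra row comes back there is a next page, and the probe row is dropped before returning. A compact sketch of the idea, independent of the surrounding classes:

// Sketch: "limit + 1" pagination probe.
// fetchPage is a stand-in for any query that honours a row limit.
async function pageWithNextFlag<T>(
  fetchPage: (limit: number) => Promise<T[]>,
  limit: number
): Promise<{ items: T[]; hasNextPage: boolean }> {
  const rows = await fetchPage(limit + 1); // ask for one extra row
  const hasNextPage = rows.length > limit; // extra row present => more data
  if (hasNextPage) {
    rows.pop(); // drop the probe row so the page size stays `limit`
  }
  return { items: rows, hasNextPage };
}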
flow.origCurrency.toString() : '', - externalReferences, - reportDetails, - parkedParentSource: - parkedParentSource.length > 0 ? parkedParentSource : null, - // Paged item field - cursor: flow.id.valueOf(), - }; - }) - ); - - return { - flows: items, - hasNextPage: limit <= flows.length, - hasPreviousPage: false, // TODO: cursorCondition['id'].GT !== undefined, - startCursor: flows.length ? flows[0].id.valueOf() : 0, - endCursor: flows.length ? flows[flows.length - 1].id.valueOf() : 0, - pageSize: flows.length, - sortField: orderBy.column, - sortOrder: orderBy.order, - total: count.count, - }; - } - - private async getFlowObjects( - flowIds: FlowId[], - models: Database, - organizationsFO: any[], - locationsFO: any[], - plansFO: any[], - usageYearsFO: any[] - ): Promise { - const flowObjects = await models.flowObject.find({ - where: { - flowID: { - [Op.IN]: flowIds, - }, - }, - }); - - flowObjects.forEach((flowObject) => { - if (flowObject.objectType === 'organization') { - organizationsFO.push(flowObject); - } else if (flowObject.objectType === 'location') { - locationsFO.push(flowObject); - } else if (flowObject.objectType === 'plan') { - plansFO.push(flowObject); - } else if (flowObject.objectType === 'usageYear') { - usageYearsFO.push(flowObject); - } - }); - } - - private async getParketParents( - flow: any, - flowLink: any[], - models: Database - ): Promise { - const flowLinksDepth0 = flowLink.filter((flowLink) => flowLink.depth === 0); - - const flowLinksParent = flowLinksDepth0.filter( - (flowLink) => flowLink.parentID === flow.id - ); - - const categories = await models.category.find({ - where: { - group: 'flowType', - name: 'parked', - }, - }); - - const categoriesIDs = categories.map((category) => category.id); - - const categoryRef = await models.categoryRef.find({ - where: { - categoryID: { - [Op.IN]: categoriesIDs, - }, - versionID: flow.versionID, - }, - }); - - const parentFlows = flowLinksParent - .filter((flowLink) => { - return categoryRef.some( - (categoryRef) => - categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() - ); - }) - .map((flowLink) => { - return flowLink.parentID.valueOf(); - }); + // Map count result query to count object + const countObject = countRes[0] as { count: number }; - return parentFlows; + return { flows, count: countObject.count }; } } diff --git a/tests/unit/flow-search-service.spec.ts b/tests/unit/flow-search-service.spec.ts index 3e759b1a..7b2c655b 100644 --- a/tests/unit/flow-search-service.spec.ts +++ b/tests/unit/flow-search-service.spec.ts @@ -1,73 +1,115 @@ import Container from 'typedi'; import { FlowSearchService } from '../../src/domain-services/flows/flow-search-service'; -import { SearchFlowsFilters } from '../../src/domain-services/flows/graphql/args'; +import { + FlowObjectFilters, + SearchFlowsFilters, +} from '../../src/domain-services/flows/graphql/args'; -describe('PrepareFlowConditions', () => { +describe('FlowSearchService', () => { let flowSearchService: FlowSearchService; beforeEach(() => { // Initialize your class instance if needed flowSearchService = Container.get(FlowSearchService); }); + describe('PrepareFlowConditions', () => { + it('should prepare flow conditions with valid filters', () => { + const flowFilters = new SearchFlowsFilters(); + flowFilters.id = 1; + flowFilters.activeStatus = true; + flowFilters.status = 'commitment'; + flowFilters.type = 'carryover'; + flowFilters.amountUSD = 1000; + flowFilters.reporterReferenceCode = 123; + flowFilters.sourceSystemId = 456; + flowFilters.legacyId = 789; - 
it('should prepare flow conditions with valid filters', () => { - const flowFilters = new SearchFlowsFilters(); - flowFilters.id = 1; - flowFilters.activeStatus = true; - flowFilters.status = 'commitment'; - flowFilters.type = 'carryover'; - flowFilters.amountUSD = 1000; - flowFilters.reporterReferenceCode = 123; - flowFilters.sourceSystemId = 456; - flowFilters.legacyId = 789; - - const result = flowSearchService.prepareFlowConditions(flowFilters); - - expect(result).toEqual({ - id: 1, - activeStatus: true, - status: 'commitment', - type: 'carryover', - amountUSD: 1000, - reporterReferenceCode: 123, - sourceSystemId: 456, - legacyId: 789, + const result = flowSearchService.prepareFlowConditions(flowFilters); + + expect(result).toEqual({ + id: 1, + activeStatus: true, + status: 'commitment', + type: 'carryover', + amountUSD: 1000, + reporterReferenceCode: 123, + sourceSystemId: 456, + legacyId: 789, + }); }); - }); - it('should prepare flow conditions with some filters set to undefined', () => { - const flowFilters = new SearchFlowsFilters(); - flowFilters.id = 1; - flowFilters.activeStatus = true; + it('should prepare flow conditions with some filters set to undefined', () => { + const flowFilters = new SearchFlowsFilters(); + flowFilters.id = 1; + flowFilters.activeStatus = true; - const result = flowSearchService.prepareFlowConditions(flowFilters); + const result = flowSearchService.prepareFlowConditions(flowFilters); - expect(result).toEqual({ - id: 1, - activeStatus: true, + expect(result).toEqual({ + id: 1, + activeStatus: true, + }); }); - }); - it('should prepare flow conditions with all filters set to undefined', () => { - const flowFilters = new SearchFlowsFilters(); + it('should prepare flow conditions with all filters set to undefined', () => { + const flowFilters = new SearchFlowsFilters(); + + const result = flowSearchService.prepareFlowConditions(flowFilters); + + expect(result).toEqual({}); + }); - const result = flowSearchService.prepareFlowConditions(flowFilters); + it('should prepare flow conditions with some filters having falsy values', () => { + const flowFilters = new SearchFlowsFilters(); + flowFilters.id = 0; + flowFilters.activeStatus = false; + flowFilters.amountUSD = 0; - expect(result).toEqual({}); + const result = flowSearchService.prepareFlowConditions(flowFilters); + + expect(result).toEqual({ + id: 0, + activeStatus: false, + amountUSD: 0, + }); + }); }); + describe('prepareFlowObjectConditions', () => { + it('should prepare flow object conditions correctly', () => { + const flowObjectFilters: FlowObjectFilters[] = [ + { objectType: 'organization', direction: 'source', objectID: 12469 }, + { + objectType: 'organization', + direction: 'destination', + objectID: 5197, + }, + ]; + + const result = + flowSearchService.prepareFlowObjectConditions(flowObjectFilters); - it('should prepare flow conditions with some filters having falsy values', () => { - const flowFilters = new SearchFlowsFilters(); - flowFilters.id = 0; - flowFilters.activeStatus = false; - flowFilters.amountUSD = 0; + const expected = new Map>([ + [ + 'organization', + new Map([ + ['source', [12469]], + ['destination', [5197]], + ]), + ], + ]); + + expect(result).toEqual(expected); + }); - const result = flowSearchService.prepareFlowConditions(flowFilters); + it('should throw an error for duplicate flow object filter', () => { + const flowObjectFilters: FlowObjectFilters[] = [ + { objectType: 'organization', direction: 'source', objectID: 12469 }, + { objectType: 'organization', direction: 
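The duplicate-filter test being set up here pins down the guard behaviour without showing its implementation in this hunk. One way such a check could look, keyed on (objectType, direction, objectID), purely as an illustration and not the repository's code:

// Illustrative only: reject exact duplicates among flow-object filters.
interface FlowObjectFilterLike {
  objectType: string;
  direction: string;
  objectID: number;
}

function assertNoDuplicateFilters(filters: FlowObjectFilterLike[]): void {
  const seen = new Set<string>();
  for (const filter of filters) {
    const key = `${filter.objectType} ${filter.direction} ${filter.objectID}`;
    if (seen.has(key)) {
      // Matches the message asserted in the test below.
      throw new Error(`Duplicate flow object filter: ${key}`);
    }
    seen.add(key);
  }
}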
'source', objectID: 12469 }, // Duplicate filter + ]; - expect(result).toEqual({ - id: 0, - activeStatus: false, - amountUSD: 0, + expect(() => + flowSearchService.prepareFlowObjectConditions(flowObjectFilters) + ).toThrowError('Duplicate flow object filter: organization source 12469'); }); }); }); From 51e70cc7a1461bd27ece05919ac2a1cacbe21cbb Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 13 Nov 2023 13:26:51 +0100 Subject: [PATCH 23/67] Fix bug when apply FlowObject filter. --- .../{flows => flow-link}/flow-link-service.ts | 0 .../flows/flow-search-service.ts | 2 +- .../impl/flow-object-conditions-strategy.ts | 22 +++++++++---------- 3 files changed, 12 insertions(+), 12 deletions(-) rename src/domain-services/{flows => flow-link}/flow-link-service.ts (100%) diff --git a/src/domain-services/flows/flow-link-service.ts b/src/domain-services/flow-link/flow-link-service.ts similarity index 100% rename from src/domain-services/flows/flow-link-service.ts rename to src/domain-services/flow-link/flow-link-service.ts diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 5233d939..b60f2e89 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -19,7 +19,7 @@ import { OrganizationService } from '../organizations/organization-service'; import { PlanService } from '../plans/plan-service'; import { ReportDetailService } from '../report-details/report-detail-service'; import { UsageYearService } from '../usage-years/usage-year-service'; -import { FlowLinkService } from './flow-link-service'; +import { FlowLinkService } from '../flow-link/flow-link-service'; @Service() export class FlowSearchService { diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts index f44020bd..6bfcc6ea 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts @@ -1,5 +1,5 @@ import { Database } from '@unocha/hpc-api-core/src/db'; -import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowObjectService } from '../../../flow-object/flow-object-service'; import { FlowService } from '../../flow-service'; @@ -42,7 +42,7 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { // Combine conditions from flowObjects FlowIDs and flow conditions const mergedFlowConditions = { ...flowEntityConditions, - flowID: { + id: { [Op.IN]: flowIDsFromFilteredFlowObjects, }, }; @@ -80,15 +80,15 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { for (const [refDirection, objectIDs] of refDirectionMap) { flowObjectWhere = { ...flowObjectWhere, - objectID: { - [Op.IN]: objectIDs, - }, - refDirection: { - [Op.IN]: refDirection, - }, - objectType: { - [Op.IN]: objectType, - }, + objectID: { + [Op.IN]: objectIDs, + }, + refDirection: { + [Op.LIKE]: refDirection, + }, + objectType: { + [Op.LIKE]: objectType, + }, }; } } From 6ed5cd7d4fe20696918e717791a064cf19644f93 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 13 Nov 2023 16:53:22 +0100 Subject: [PATCH 24/67] Ref: add types instead of any --- .../categories/category-service.ts | 5 +- .../flow-object/flow-object-service.ts | 11 ++ src/domain-services/flow-object/model.ts | 4 + 
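The bug fix in PATCH 23/67 above amounts to two corrections: the flowObject lookup matches refDirection and objectType as single values (LIKE) instead of feeding strings to IN, and the flow IDs it yields are merged into the flow query under the flow table's own id column rather than flowID. Restated outside the diff, as a sketch with hypothetical input values mirroring the unit tests:

import { Op } from '@unocha/hpc-api-core/src/db/util/conditions';

// Hypothetical example inputs.
const objectIDs = [12469];
const refDirection = 'source';
const objectType = 'organization';
const flowIDsFromFlowObjects = [1, 2, 3];

// flowObject lookup: many IDs via IN, single-valued columns via LIKE.
const flowObjectWhere = {
  objectID: { [Op.IN]: objectIDs },
  refDirection: { [Op.LIKE]: refDirection },
  objectType: { [Op.LIKE]: objectType },
};

// flow lookup: surviving IDs are matched against flow.id, not flowID.
const flowWhere = {
  activeStatus: true,
  id: { [Op.IN]: flowIDsFromFlowObjects },
};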
.../flows/flow-search-service.ts | 183 +++++++++++------- src/domain-services/flows/model.ts | 4 + .../flows/strategy/flow-search-strategy.ts | 6 +- .../impl/flow-object-conditions-strategy.ts | 2 +- .../impl/only-flow-conditions-strategy.ts | 4 +- 8 files changed, 140 insertions(+), 79 deletions(-) create mode 100644 src/domain-services/flow-object/model.ts create mode 100644 src/domain-services/flows/model.ts diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index ec19c9f9..61099d94 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -5,7 +5,6 @@ import { Category } from './graphql/types'; import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -// TODO: add proper type for flowLinks @Service() export class CategoryService { async getCategoriesForFlows( @@ -50,7 +49,7 @@ export class CategoryService { categoriesMap.set(flowId, []); } - const categoriesForFlow = categoriesMap.get(flowId)!; + const categoriesPerFlow = categoriesMap.get(flowId)!; const category = categories.find((cat) => cat.id === catRef.categoryID); @@ -58,7 +57,7 @@ export class CategoryService { throw new Error(`Category with ID ${catRef.categoryID} does not exist`); } - categoriesForFlow.push(this.mapCategoryToFlowCategory(category, catRef)); + categoriesPerFlow.push(this.mapCategoryToFlowCategory(category, catRef)); }); return categoriesMap; diff --git a/src/domain-services/flow-object/flow-object-service.ts b/src/domain-services/flow-object/flow-object-service.ts index c7c23be3..049bd03d 100644 --- a/src/domain-services/flow-object/flow-object-service.ts +++ b/src/domain-services/flow-object/flow-object-service.ts @@ -1,5 +1,6 @@ import { Database } from '@unocha/hpc-api-core/src/db'; import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; @Service() @@ -14,4 +15,14 @@ export class FlowObjectService { // Keep only not duplicated flowIDs return [...new Set(flowObjects.map((flowObject) => flowObject.flowID))]; } + + async getFlowObjectByFlowId(models: Database, flowIds: FlowId[]) { + return await models.flowObject.find({ + where: { + flowID: { + [Op.IN]: flowIds, + }, + }, + }); + } } diff --git a/src/domain-services/flow-object/model.ts b/src/domain-services/flow-object/model.ts new file mode 100644 index 00000000..1d666288 --- /dev/null +++ b/src/domain-services/flow-object/model.ts @@ -0,0 +1,4 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; + +export type FlowObject = InstanceOfModel; diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index b60f2e89..bbf48472 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -7,7 +7,11 @@ import { SearchFlowsArgs, SearchFlowsFilters, } from './graphql/args'; -import { FlowParkedParentSource, FlowSearchResult } from './graphql/types'; +import { + FlowPaged, + FlowParkedParentSource, + FlowSearchResult, +} from './graphql/types'; import { FlowSearchStrategy } from './strategy/flow-search-strategy'; import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy'; import { FlowObjectFiltersStrategy } from 
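The pattern in this refactor is to derive entity types from the Database model map once (FlowObject here, FlowEntity further down) and use them in helper signatures instead of any. A small usage sketch, assuming the alias added in this commit and a relative import path that may differ in practice:

import { type FlowObject } from '../flow-object/model';

// Sketch: with the model-derived alias, helpers stay typed end to end.
function groupByObjectType(
  flowObjects: FlowObject[]
): Map<FlowObject['objectType'], FlowObject[]> {
  const buckets = new Map<FlowObject['objectType'], FlowObject[]>();
  for (const flowObject of flowObjects) {
    const bucket = buckets.get(flowObject.objectType) ?? [];
    bucket.push(flowObject);
    buckets.set(flowObject.objectType, bucket);
  }
  return buckets;
}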
'./strategy/impl/flow-object-conditions-strategy'; @@ -20,6 +24,14 @@ import { PlanService } from '../plans/plan-service'; import { ReportDetailService } from '../report-details/report-detail-service'; import { UsageYearService } from '../usage-years/usage-year-service'; import { FlowLinkService } from '../flow-link/flow-link-service'; +import { FlowObject } from '../flow-object/model'; +import { FlowObjectService } from '../flow-object/flow-object-service'; +import { FlowEntity } from './model'; +import { Category } from '../categories/graphql/types'; +import { Organization } from '../organizations/graphql/types'; +import { BaseLocation } from '../location/graphql/types'; +import { BasePlan } from '../plans/graphql/types'; +import { UsageYear } from '../usage-years/grpahql/types'; @Service() export class FlowSearchService { @@ -33,7 +45,8 @@ export class FlowSearchService { private readonly categoryService: CategoryService, private readonly flowLinkService: FlowLinkService, private readonly externalReferenceService: ExternalReferenceService, - private readonly reportDetailService: ReportDetailService + private readonly reportDetailService: ReportDetailService, + private readonly flowObjectService: FlowObjectService ) {} async search( @@ -42,25 +55,6 @@ export class FlowSearchService { ): Promise { const { limit, afterCursor, beforeCursor, sortField, sortOrder } = filters; - if (beforeCursor && afterCursor) { - throw new Error('Cannot use before and after cursor at the same time'); - } - - let cursorCondition; - if (afterCursor) { - cursorCondition = { - id: { - [Op.GT]: createBrandedValue(afterCursor), - }, - }; - } else if (beforeCursor) { - cursorCondition = { - id: { - [Op.LT]: createBrandedValue(beforeCursor), - }, - }; - } - const orderBy = { column: sortField ?? 'updatedAt', order: sortOrder ?? 
'desc', @@ -68,15 +62,21 @@ export class FlowSearchService { const { flowFilters, flowObjectFilters } = filters; + const cursorCondition = this.buildCursorCondition( + beforeCursor, + afterCursor + ); + + // Determine strategy of how to search for flows const { strategy, conditions } = this.determineStrategy( flowFilters, - flowObjectFilters, - cursorCondition + flowObjectFilters ); // Fetch one more item to check for hasNextPage const limitComputed = limit + 1; + // Obtain flows and its count based on the strategy selected const { flows, count } = await strategy.search( conditions, orderBy, @@ -93,31 +93,36 @@ export class FlowSearchService { const flowIds: FlowId[] = flows.map((flow) => flow.id); - const organizationsFO: any[] = []; - const locationsFO: any[] = []; - const plansFO: any[] = []; - const usageYearsFO: any[] = []; - - const [externalReferencesMap] = await Promise.all([ + // Obtain external references and flow objects in parallel + const [externalReferencesMap, flowObjects] = await Promise.all([ this.externalReferenceService.getExternalReferencesForFlows( flowIds, models ), - this.getFlowObjects( - flowIds, - models, - organizationsFO, - locationsFO, - plansFO, - usageYearsFO - ), + this.flowObjectService.getFlowObjectByFlowId(models, flowIds), ]); + // Map flow objects to their respective arrays + const organizationsFO: FlowObject[] = []; + const locationsFO: FlowObject[] = []; + const plansFO: FlowObject[] = []; + const usageYearsFO: FlowObject[] = []; + + this.mapFlowObjects( + flowObjects, + organizationsFO, + locationsFO, + plansFO, + usageYearsFO + ); + + // Obtain flow links const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( flowIds, models ); + // Perform all nested queries in parallel const [ categoriesMap, organizationsMap, @@ -171,16 +176,8 @@ export class FlowSearchService { ) .map((flowLink) => flowLink.parentID.valueOf()) as number[]; - return { - // Mandatory fields - id: flow.id.valueOf(), - versionID: flow.versionID, - amountUSD: flow.amountUSD.toString(), - createdAt: flow.createdAt.toISOString(), - updatedAt: flow.updatedAt.toISOString(), - activeStatus: flow.activeStatus, - restricted: flow.restricted, - // Optional fields + return this.buildFlowDTO( + flow, categories, organizations, locations, @@ -188,15 +185,10 @@ export class FlowSearchService { usageYears, childIDs, parentIDs, - origAmount: flow.origAmount ? flow.origAmount.toString() : '', - origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', externalReferences, reportDetails, - parkedParentSource: - parkedParentSource.length > 0 ? 
parkedParentSource : null, - // Paged item field - cursor: flow.id.valueOf(), - }; + parkedParentSource + ); }) ); @@ -260,9 +252,9 @@ export class FlowSearchService { determineStrategy( flowFilters: SearchFlowsFilters, - flowObjectFilters: FlowObjectFilters[], - conditions: any + flowObjectFilters: FlowObjectFilters[] ): { strategy: FlowSearchStrategy; conditions: any } { + let conditions = {}; if ( (!flowFilters && (!flowObjectFilters || flowObjectFilters.length === 0)) || @@ -310,22 +302,13 @@ export class FlowSearchService { conditionsMap.set('flow', flowConditions); return conditionsMap; } - private async getFlowObjects( - flowIds: FlowId[], - models: Database, + private mapFlowObjects( + flowObjects: FlowObject[], organizationsFO: any[], locationsFO: any[], plansFO: any[], usageYearsFO: any[] - ): Promise { - const flowObjects = await models.flowObject.find({ - where: { - flowID: { - [Op.IN]: flowIds, - }, - }, - }); - + ) { flowObjects.forEach((flowObject) => { if (flowObject.objectType === 'organization') { organizationsFO.push(flowObject); @@ -381,4 +364,68 @@ export class FlowSearchService { return parentFlows; } + + private buildCursorCondition(beforeCursor: number, afterCursor: number) { + if (beforeCursor && afterCursor) { + throw new Error('Cannot use before and after cursor at the same time'); + } + + let cursorCondition; + if (afterCursor) { + cursorCondition = { + id: { + [Op.GT]: createBrandedValue(afterCursor), + }, + }; + } else if (beforeCursor) { + cursorCondition = { + id: { + [Op.LT]: createBrandedValue(beforeCursor), + }, + }; + } + + return cursorCondition; + } + + private buildFlowDTO( + flow: FlowEntity, + categories: Category[], + organizations: Organization[], + locations: BaseLocation[], + plans: BasePlan[], + usageYears: UsageYear[], + childIDs: number[], + parentIDs: number[], + externalReferences: any[], + reportDetails: any[], + parkedParentSource: FlowParkedParentSource[] + ): FlowPaged { + return { + // Mandatory fields + id: flow.id.valueOf(), + versionID: flow.versionID, + amountUSD: flow.amountUSD.toString(), + createdAt: flow.createdAt.toISOString(), + updatedAt: flow.updatedAt.toISOString(), + activeStatus: flow.activeStatus, + restricted: flow.restricted, + // Optional fields + categories, + organizations, + locations, + plans, + usageYears, + childIDs, + parentIDs, + origAmount: flow.origAmount ? flow.origAmount.toString() : '', + origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', + externalReferences, + reportDetails, + parkedParentSource: + parkedParentSource.length > 0 ? 
parkedParentSource : null, + // Paged item field + cursor: flow.id.valueOf(), + }; + } } diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts new file mode 100644 index 00000000..b5d35436 --- /dev/null +++ b/src/domain-services/flows/model.ts @@ -0,0 +1,4 @@ +import { Database } from '@unocha/hpc-api-core/src/db'; +import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; + +export type FlowEntity = InstanceDataOfModel; diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index 76f8b6a3..cd15f90e 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -1,10 +1,8 @@ import { Database } from '@unocha/hpc-api-core/src/db'; -import { FlowSearchResult } from '../graphql/types'; -import { InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; -import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { FlowEntity } from '../model'; export interface FlowSearchStrategyResponse { - flows: InstanceDataOfModel[]; + flows: FlowEntity[]; count: number; } diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts index 6bfcc6ea..3ccb19bd 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts @@ -53,7 +53,7 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { // and flow conditions const [flows, countRes] = await Promise.all([ this.flowService.getFlows(models, conditions, orderBy, limit), - this.flowService.getFlowsCount(models, conditions), + this.flowService.getFlowsCount(models, mergedFlowConditions), ]); // Map count result query to count object diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts index 12f97057..2af8506b 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts @@ -1,12 +1,10 @@ import { Database } from '@unocha/hpc-api-core/src/db'; import { Service } from 'typedi'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { FlowService } from '../../flow-service'; import { FlowSearchStrategy, FlowSearchStrategyResponse, } from '../flow-search-strategy'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; @Service() export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { @@ -24,7 +22,7 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { const [flows, countRes] = await Promise.all([ this.flowService.getFlows(models, conditions, orderBy, limit), - this.flowService.getFlowsCount(models, conditions), + this.flowService.getFlowsCount(models, flowConditions), ]); // Map count result query to count object From b01eee285d836f482112d3580d69c915d31dffcb Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 14 Nov 2023 09:07:41 +0100 Subject: [PATCH 25/67] Temp: add api-core dep --- package.json | 2 +- yarn.lock | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 9df752b5..f347291f 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "lint": "yarn lint-prettier && 
yarn lint-eslint" }, "dependencies": { - "@unocha/hpc-api-core": "^7.0.0", + "@unocha/hpc-api-core": "github:UN-OCHA/hpc-api-core#8ca426e47d4a9431484815687096dda84747f0e2", "apollo-server-hapi": "^3.12.0", "bunyan": "^1.8.15", "class-validator": "^0.14.0", diff --git a/yarn.lock b/yarn.lock index 77cc98bc..746df1f4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1624,10 +1624,9 @@ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@unocha/hpc-api-core@^7.0.0": +"@unocha/hpc-api-core@github:UN-OCHA/hpc-api-core#8ca426e47d4a9431484815687096dda84747f0e2": version "7.0.0" - resolved "https://registry.yarnpkg.com/@unocha/hpc-api-core/-/hpc-api-core-7.0.0.tgz#fcce4dfbef17476ee60cef794eea9c562d3f2ddb" - integrity sha512-itzYwSOMPo39tLXQP7J0PD/4dfweEerEReQ5R82WYMYwYVAdFVPOPMC0HuPBEPupWBc45peYciNGGCCdzTIuhA== + resolved "https://codeload.github.com/UN-OCHA/hpc-api-core/tar.gz/8ca426e47d4a9431484815687096dda84747f0e2" dependencies: "@types/lodash" "^4.14.194" "@types/node-fetch" "2.6.3" @@ -1637,6 +1636,7 @@ lodash "^4.17.21" node-fetch "2.6.9" pg "^8.11.3" + ts-node "^10.9.1" "@unocha/hpc-repo-tools@^4.0.0": version "4.0.0" From 1d6792400d99bc69643d1c45c31c581211c46b25 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 21 Nov 2023 11:04:07 +0100 Subject: [PATCH 26/67] Allow filter flow by multiples id --- .../categories/category-service.ts | 18 +++++++++--------- src/domain-services/flows/flow-service.ts | 2 +- src/domain-services/flows/graphql/args.ts | 6 +++--- src/domain-services/flows/graphql/resolver.ts | 6 +++--- src/domain-services/flows/graphql/types.ts | 4 ++-- src/domain-services/location/graphql/types.ts | 2 +- .../location/location-service.ts | 14 +++++++------- .../organizations/organization-service.ts | 10 +++++----- src/domain-services/plans/graphql/types.ts | 2 +- src/domain-services/plans/plan-service.ts | 10 +++++----- .../usage-years/usage-year-service.ts | 12 ++++++------ tests/unit/flow-search-service.spec.ts | 18 +++++++++--------- 12 files changed, 52 insertions(+), 52 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index 61099d94..20560423 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -1,14 +1,14 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { Category } from './graphql/types'; -import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { type Category } from './graphql/types'; @Service() export class CategoryService { async getCategoriesForFlows( - flowLinks: Map[]>, + flowLinks: Map>>, models: Database ): Promise> { const flowLinksBrandedIds = []; @@ -23,7 +23,7 @@ export class CategoryService { return categoriesMap; } - const categoriesRef: InstanceDataOfModel[] = + const categoriesRef: Array> = await models.categoryRef.find({ where: { objectID: { @@ -32,7 +32,7 @@ export class CategoryService { 
}, }); - const categories: InstanceDataOfModel[] = + const categories: Array> = await models.category.find({ where: { id: { @@ -42,7 +42,7 @@ export class CategoryService { }); // Populate the map with categories for each flow - categoriesRef.forEach((catRef) => { + for (const catRef of categoriesRef) { const flowId = catRef.objectID.valueOf(); if (!categoriesMap.has(flowId)) { @@ -58,7 +58,7 @@ export class CategoryService { } categoriesPerFlow.push(this.mapCategoryToFlowCategory(category, catRef)); - }); + } return categoriesMap; } diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 536ea3d8..4920eecd 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -1,5 +1,5 @@ +import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { Service } from 'typedi'; -import { Database } from '@unocha/hpc-api-core/src/db/type'; @Service() export class FlowService { diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 155aa67f..7996e7b0 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -1,11 +1,11 @@ import { ArgsType, Field, InputType } from 'type-graphql'; -import { FlowSortField } from './types'; import { PaginationArgs } from '../../../utils/graphql/pagination'; +import { type FlowSortField } from './types'; @InputType() export class SearchFlowsFilters { - @Field({ nullable: true }) - id: number; + @Field(() => [Number], { nullable: true }) + id: number[]; @Field({ nullable: true }) activeStatus: boolean; diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 80a96a64..2852b580 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -1,9 +1,9 @@ -import { FlowPaged, FlowSearchResult, FlowSortField } from './types'; +import { Args, Ctx, Query, Resolver } from 'type-graphql'; import { Service } from 'typedi'; -import { Arg, Args, Ctx, Query, Resolver } from 'type-graphql'; -import { FlowSearchService } from '../flow-search-service'; import Context from '../../Context'; +import { FlowSearchService } from '../flow-search-service'; import { SearchFlowsArgs } from './args'; +import { FlowPaged, FlowSearchResult } from './types'; @Service() @Resolver(FlowPaged) diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 50fb8877..1307e47f 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -1,12 +1,12 @@ import { Field, ObjectType } from 'type-graphql'; -import { IItemPaged, PageInfo } from '../../../utils/graphql/pagination'; +import { BaseType } from '../../../utils/graphql/base-types'; +import { PageInfo, type IItemPaged } from '../../../utils/graphql/pagination'; import { Category } from '../../categories/graphql/types'; import { BaseLocation } from '../../location/graphql/types'; import { Organization } from '../../organizations/graphql/types'; import { BasePlan } from '../../plans/graphql/types'; import { ReportDetail } from '../../report-details/graphql/types'; import { UsageYear } from '../../usage-years/grpahql/types'; -import { BaseType } from '../../../utils/graphql/base-types'; @ObjectType() export class FlowExternalReference { diff --git a/src/domain-services/location/graphql/types.ts b/src/domain-services/location/graphql/types.ts index 76f10cf3..27f9ecec 
100644 --- a/src/domain-services/location/graphql/types.ts +++ b/src/domain-services/location/graphql/types.ts @@ -1,7 +1,7 @@ -import { BaseType } from '../../../utils/graphql/base-types'; import { Brand } from '@unocha/hpc-api-core/src/util/types'; import { MaxLength } from 'class-validator'; import { Field, ID, Int, ObjectType, registerEnumType } from 'type-graphql'; +import { BaseType } from '../../../utils/graphql/base-types'; export enum LocationStatus { active = 'active', diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index 5221529c..eb83dbe0 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -1,10 +1,10 @@ +import { type LocationId } from '@unocha/hpc-api-core/src/db/models/location'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { BaseLocation } from './graphql/types'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { LocationId } from '@unocha/hpc-api-core/src/db/models/location'; +import { type BaseLocation } from './graphql/types'; @Service() export class LocationService { @@ -31,14 +31,14 @@ export class LocationService { } async getLocationsForFlows( - locationsFO: InstanceDataOfModel[], + locationsFO: Array>, models: Database ): Promise>> { const locationObjectsIDs: LocationId[] = locationsFO.map((locFO) => createBrandedValue(locFO.objectID) ); - const locations: InstanceDataOfModel[] = + const locations: Array> = await models.location.find({ where: { id: { @@ -49,7 +49,7 @@ export class LocationService { const locationsMap = new Map>(); - locationsFO.forEach((locFO) => { + for (const locFO of locationsFO) { const flowId = locFO.flowID; if (!locationsMap.has(flowId)) { locationsMap.set(flowId, new Set()); @@ -61,7 +61,7 @@ export class LocationService { } const locationMapped = this.mapLocationsToFlowLocations(location, locFO); locationsMap.get(flowId)!.add(locationMapped); - }); + } return locationsMap; } diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index a35e6637..411531c4 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -1,7 +1,7 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; -import { Service } from 'typedi'; +import { type Database } from '@unocha/hpc-api-core/src/db'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { Organization } from './graphql/types'; +import { Service } from 'typedi'; +import { type Organization } from './graphql/types'; @Service() export class OrganizationService { @@ -16,7 +16,7 @@ export class OrganizationService { const organizationsMap = new Map(); - organizationsFO.forEach((orgFO) => { + for (const orgFO of organizationsFO) { const flowId = orgFO.flowID; if (!organizationsMap.has(flowId)) { @@ -39,7 +39,7 @@ export class OrganizationService { ); organizationsMap.get(flowId)!.push(organizationMapped); - }); + } return organizationsMap; } diff --git a/src/domain-services/plans/graphql/types.ts b/src/domain-services/plans/graphql/types.ts index 3316b717..9a6fe4fc 100644 --- 
a/src/domain-services/plans/graphql/types.ts +++ b/src/domain-services/plans/graphql/types.ts @@ -1,8 +1,8 @@ import { Brand } from '@unocha/hpc-api-core/src/util/types'; import { MaxLength } from 'class-validator'; import { Field, ID, Int, ObjectType } from 'type-graphql'; -import PlanTag from '../../plan-tag/graphql/types'; import { BaseType } from '../../../utils/graphql/base-types'; +import PlanTag from '../../plan-tag/graphql/types'; @ObjectType() export class PlanCaseload { diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index 63a97d2e..f4d98301 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -1,11 +1,11 @@ import { type PlanId } from '@unocha/hpc-api-core/src/db/models/plan'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { NotFoundError } from '@unocha/hpc-api-core/src/util/error'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { BasePlan } from './graphql/types'; -import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { type BasePlan } from './graphql/types'; @Service() export class PlanService { @@ -49,13 +49,13 @@ export class PlanService { } async getPlansForFlows( - plansFO: InstanceDataOfModel[], + plansFO: Array>, models: Database ): Promise> { const planObjectsIDs: PlanId[] = plansFO.map((planFO) => createBrandedValue(planFO.objectID) ); - const plans: InstanceDataOfModel[] = + const plans: Array> = await models.plan.find({ where: { id: { @@ -83,7 +83,7 @@ export class PlanService { const planMapped = this.mapPlansToFlowPlans( plan, planVersion[0], - planFlowOobject?.refDirection || null + planFlowOobject?.refDirection ?? 
null ); if (flowId) { diff --git a/src/domain-services/usage-years/usage-year-service.ts b/src/domain-services/usage-years/usage-year-service.ts index bb74c9a6..dfb1e6f5 100644 --- a/src/domain-services/usage-years/usage-year-service.ts +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -1,8 +1,8 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; +import { type Database } from '@unocha/hpc-api-core/src/db'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Service } from 'typedi'; -import { UsageYear } from './grpahql/types'; -import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { type UsageYear } from './grpahql/types'; @Service() export class UsageYearService { @@ -10,7 +10,7 @@ export class UsageYearService { usageYearsFO: any[], models: Database ): Promise> { - const usageYears: InstanceDataOfModel[] = + const usageYears: Array> = await models.usageYear.find({ where: { id: { @@ -21,7 +21,7 @@ export class UsageYearService { const usageYearsMap = new Map(); - usageYearsFO.forEach((usageYearFO) => { + for (const usageYearFO of usageYearsFO) { const flowId = usageYearFO.flowID; if (!usageYearsMap.has(flowId)) { usageYearsMap.set(flowId, []); @@ -40,7 +40,7 @@ export class UsageYearService { usageYearFO.refDirection ); usageYearsMap.get(flowId)!.push(usageYearMapped); - }); + } return usageYearsMap; } diff --git a/tests/unit/flow-search-service.spec.ts b/tests/unit/flow-search-service.spec.ts index 7b2c655b..7797d044 100644 --- a/tests/unit/flow-search-service.spec.ts +++ b/tests/unit/flow-search-service.spec.ts @@ -1,8 +1,8 @@ import Container from 'typedi'; import { FlowSearchService } from '../../src/domain-services/flows/flow-search-service'; import { - FlowObjectFilters, SearchFlowsFilters, + type FlowObjectFilters, } from '../../src/domain-services/flows/graphql/args'; describe('FlowSearchService', () => { @@ -15,7 +15,7 @@ describe('FlowSearchService', () => { describe('PrepareFlowConditions', () => { it('should prepare flow conditions with valid filters', () => { const flowFilters = new SearchFlowsFilters(); - flowFilters.id = 1; + flowFilters.id = [1]; flowFilters.activeStatus = true; flowFilters.status = 'commitment'; flowFilters.type = 'carryover'; @@ -40,7 +40,7 @@ describe('FlowSearchService', () => { it('should prepare flow conditions with some filters set to undefined', () => { const flowFilters = new SearchFlowsFilters(); - flowFilters.id = 1; + flowFilters.id = [1]; flowFilters.activeStatus = true; const result = flowSearchService.prepareFlowConditions(flowFilters); @@ -61,14 +61,14 @@ describe('FlowSearchService', () => { it('should prepare flow conditions with some filters having falsy values', () => { const flowFilters = new SearchFlowsFilters(); - flowFilters.id = 0; + flowFilters.id = []; flowFilters.activeStatus = false; flowFilters.amountUSD = 0; const result = flowSearchService.prepareFlowConditions(flowFilters); expect(result).toEqual({ - id: 0, + id: [], activeStatus: false, amountUSD: 0, }); @@ -77,7 +77,7 @@ describe('FlowSearchService', () => { describe('prepareFlowObjectConditions', () => { it('should prepare flow object conditions correctly', () => { const flowObjectFilters: FlowObjectFilters[] = [ - { objectType: 'organization', direction: 'source', objectID: 12469 }, + { objectType: 'organization', direction: 'source', objectID: 12_469 }, { objectType: 'organization', direction: 
'destination', @@ -92,7 +92,7 @@ describe('FlowSearchService', () => { [ 'organization', new Map([ - ['source', [12469]], + ['source', [12_469]], ['destination', [5197]], ]), ], @@ -103,8 +103,8 @@ describe('FlowSearchService', () => { it('should throw an error for duplicate flow object filter', () => { const flowObjectFilters: FlowObjectFilters[] = [ - { objectType: 'organization', direction: 'source', objectID: 12469 }, - { objectType: 'organization', direction: 'source', objectID: 12469 }, // Duplicate filter + { objectType: 'organization', direction: 'source', objectID: 12_469 }, + { objectType: 'organization', direction: 'source', objectID: 12_469 }, // Duplicate filter ]; expect(() => From 3d788f57c5e811880b5305ba906e6af53683bf3a Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 21 Nov 2023 11:04:34 +0100 Subject: [PATCH 27/67] Fix incorrect query when multiple flowObject filter --- .../flows/flow-search-service.ts | 113 ++++++++++-------- .../impl/flow-object-conditions-strategy.ts | 54 ++++++--- src/utils/graphql/pagination.ts | 4 +- 3 files changed, 103 insertions(+), 68 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index bbf48472..71a60111 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,37 +1,37 @@ -import { Database } from '@unocha/hpc-api-core/src/db/type'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { - FlowObjectFilters, - SearchFlowsArgs, - SearchFlowsFilters, -} from './graphql/args'; -import { - FlowPaged, - FlowParkedParentSource, - FlowSearchResult, -} from './graphql/types'; -import { FlowSearchStrategy } from './strategy/flow-search-strategy'; -import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy'; -import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { CategoryService } from '../categories/category-service'; +import { type Category } from '../categories/graphql/types'; import { ExternalReferenceService } from '../external-reference/external-reference-service'; +import { FlowLinkService } from '../flow-link/flow-link-service'; +import { FlowObjectService } from '../flow-object/flow-object-service'; +import { type FlowObject } from '../flow-object/model'; +import { type BaseLocation } from '../location/graphql/types'; import { LocationService } from '../location/location-service'; +import { type Organization } from '../organizations/graphql/types'; import { OrganizationService } from '../organizations/organization-service'; +import { type BasePlan } from '../plans/graphql/types'; import { PlanService } from '../plans/plan-service'; import { ReportDetailService } from '../report-details/report-detail-service'; +import { type UsageYear } from '../usage-years/grpahql/types'; import { UsageYearService } from '../usage-years/usage-year-service'; -import { FlowLinkService } from '../flow-link/flow-link-service'; -import { FlowObject } from '../flow-object/model'; -import { FlowObjectService } from '../flow-object/flow-object-service'; -import { FlowEntity } from './model'; -import { Category } from 
'../categories/graphql/types'; -import { Organization } from '../organizations/graphql/types'; -import { BaseLocation } from '../location/graphql/types'; -import { BasePlan } from '../plans/graphql/types'; -import { UsageYear } from '../usage-years/grpahql/types'; +import { + type FlowObjectFilters, + type SearchFlowsArgs, + type SearchFlowsFilters, +} from './graphql/args'; +import { + type FlowPaged, + type FlowParkedParentSource, + type FlowSearchResult, +} from './graphql/types'; +import { type FlowEntity } from './model'; +import { type FlowSearchStrategy } from './strategy/flow-search-strategy'; +import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy'; +import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy'; @Service() export class FlowSearchService { @@ -144,14 +144,14 @@ export class FlowSearchService { const items = await Promise.all( flows.map(async (flow) => { - const flowLink = flowLinksMap.get(flow.id) || []; - const categories = categoriesMap.get(flow.id) || []; - const organizations = organizationsMap.get(flow.id) || []; - const locations = [...(locationsMap.get(flow.id) || [])]; - const plans = plansMap.get(flow.id) || []; - const usageYears = usageYearsMap.get(flow.id) || []; - const externalReferences = externalReferencesMap.get(flow.id) || []; - const reportDetails = reportDetailsMap.get(flow.id) || []; + const flowLink = flowLinksMap.get(flow.id) ?? []; + const categories = categoriesMap.get(flow.id) ?? []; + const organizations = organizationsMap.get(flow.id) ?? []; + const locations = [...(locationsMap.get(flow.id) ?? [])]; + const plans = plansMap.get(flow.id) ?? []; + const usageYears = usageYearsMap.get(flow.id) ?? []; + const externalReferences = externalReferencesMap.get(flow.id) ?? []; + const reportDetails = reportDetailsMap.get(flow.id) ?? []; let parkedParentSource: FlowParkedParentSource[] = []; if (flow.activeStatus && flowLink.length > 0) { @@ -197,7 +197,7 @@ export class FlowSearchService { hasNextPage: limit <= flows.length, hasPreviousPage: afterCursor !== undefined, startCursor: flows.length ? flows[0].id.valueOf() : 0, - endCursor: flows.length ? flows[flows.length - 1].id.valueOf() : 0, + endCursor: flows.length ? flows.at(-1)?.id.valueOf() ?? 
0 : 0, pageSize: flows.length, sortField: orderBy.column, sortOrder: orderBy.order, @@ -209,11 +209,15 @@ export class FlowSearchService { let flowConditions = {}; if (flowFilters) { - Object.entries(flowFilters).forEach(([key, value]) => { + for (const [key, value] of Object.entries(flowFilters)) { if (value !== undefined) { - flowConditions = { ...flowConditions, [key]: value }; + if (Array.isArray(value) && value.length !== 0) { + flowConditions = { ...flowConditions, [key]: { [Op.IN]: value } }; + } else { + flowConditions = { ...flowConditions, [key]: value }; + } } - }); + } } return flowConditions; @@ -222,13 +226,16 @@ export class FlowSearchService { prepareFlowObjectConditions( flowObjectFilters: FlowObjectFilters[] ): Map> { - const flowObjectsConditions: Map> = new Map(); + const flowObjectsConditions: Map> = new Map< + string, + Map + >(); - flowObjectFilters.forEach((flowObjectFilter) => { + for (const flowObjectFilter of flowObjectFilters) { const { objectType, direction, objectID } = flowObjectFilter; if (!flowObjectsConditions.has(objectType)) { - flowObjectsConditions.set(objectType, new Map()); + flowObjectsConditions.set(objectType, new Map()); } const refDirectionMap = flowObjectsConditions.get(objectType); @@ -245,7 +252,7 @@ export class FlowSearchService { } objectIDsArray!.push(objectID); - }); + } return flowObjectsConditions; } @@ -255,15 +262,22 @@ export class FlowSearchService { flowObjectFilters: FlowObjectFilters[] ): { strategy: FlowSearchStrategy; conditions: any } { let conditions = {}; + + const isFlowFilterDefined = flowFilters !== undefined; + const isFlowObjectFilterDefined = flowObjectFilters !== undefined; + const isFlowObjectFiltersNotEmpty = + isFlowObjectFilterDefined && flowObjectFilters.length !== 0; + if ( - (!flowFilters && - (!flowObjectFilters || flowObjectFilters.length === 0)) || - (flowFilters && (!flowObjectFilters || flowObjectFilters.length === 0)) + (!isFlowFilterDefined && + (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty)) || + (isFlowFilterDefined && + (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty)) ) { const flowConditions = this.prepareFlowConditions(flowFilters); conditions = { ...conditions, ...flowConditions }; return { strategy: this.onlyFlowFiltersStrategy, conditions }; - } else if (!flowFilters && flowObjectFilters.length !== 0) { + } else if (!isFlowFilterDefined && isFlowObjectFiltersNotEmpty) { const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); conditions = { ...conditions, ...flowObjectConditions }; @@ -272,7 +286,7 @@ export class FlowSearchService { strategy: this.flowObjectFiltersStrategy, conditions: this.buildConditionsMap(undefined, flowObjectConditions), }; - } else if (flowFilters && flowObjectFilters.length !== 0) { + } else if (isFlowFilterDefined && isFlowObjectFiltersNotEmpty) { const flowConditions = this.prepareFlowConditions(flowFilters); const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); @@ -291,9 +305,7 @@ export class FlowSearchService { }; } - throw new Error( - 'Invalid combination of flowFilters and flowObjectFilters - temp: only provide flowFilters' - ); + throw new Error('Invalid combination of flowFilters and flowObjectFilters'); } private buildConditionsMap(flowConditions: any, flowObjectConditions: any) { @@ -302,6 +314,7 @@ export class FlowSearchService { conditionsMap.set('flow', flowConditions); return conditionsMap; } + private mapFlowObjects( flowObjects: FlowObject[], organizationsFO: any[], @@ 
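With id now accepted as a list of numbers (PATCH 26/67), plain equality no longer fits, so the condition builder wraps non-empty array values in an IN clause and passes scalar values through unchanged, skipping unset filters. The rule in isolation, as a sketch:

import { Op } from '@unocha/hpc-api-core/src/db/util/conditions';

// Sketch: build flow conditions, turning non-empty arrays into IN clauses.
function toFlowConditions(filters: Record<string, unknown>) {
  let conditions: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(filters)) {
    if (value === undefined) {
      continue; // unset filters are skipped entirely
    }
    conditions =
      Array.isArray(value) && value.length !== 0
        ? { ...conditions, [key]: { [Op.IN]: value } }
        : { ...conditions, [key]: value };
  }
  return conditions;
}

// e.g. toFlowConditions({ id: [1, 2], activeStatus: true })
//   -> { id: { [Op.IN]: [1, 2] }, activeStatus: true }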
-309,7 +322,7 @@ export class FlowSearchService { plansFO: any[], usageYearsFO: any[] ) { - flowObjects.forEach((flowObject) => { + for (const flowObject of flowObjects) { if (flowObject.objectType === 'organization') { organizationsFO.push(flowObject); } else if (flowObject.objectType === 'location') { @@ -319,7 +332,7 @@ export class FlowSearchService { } else if (flowObject.objectType === 'usageYear') { usageYearsFO.push(flowObject); } - }); + } } private async getParketParents( diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts index 3ccb19bd..81390da8 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts @@ -1,11 +1,12 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowObjectService } from '../../../flow-object/flow-object-service'; -import { FlowService } from '../../flow-service'; +import { type FlowService } from '../../flow-service'; import { - FlowSearchStrategy, - FlowSearchStrategyResponse, + type FlowSearchStrategy, + type FlowSearchStrategyResponse, } from '../flow-search-strategy'; @Service() @@ -22,22 +23,23 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { cursorCondition: any, models: Database ): Promise { + // Obtain flowObjects conditions const flowObjectsConditions: Map< string, Map - > = flowConditions.get('flowObjects'); - const flowEntityConditions = flowConditions.get('flow'); + > = flowConditions.get('flowObjects') ?? new Map(); + // Obtain flow conditions + const flowEntityConditions = flowConditions.get('flow') ?? 
new Map(); + + // Obtain where clause for flowObjects const flowObjectWhere = this.mapFlowObjectConditionsToWhereClause( flowObjectsConditions ); // Obtain flowIDs based on provided flowObject conditions - const flowIDsFromFilteredFlowObjects = - await this.flowObjectService.getFlowIdsFromFlowObjects( - models, - flowObjectWhere - ); + const flowIDsFromFilteredFlowObjects: FlowId[] = + await this.getFlowIDsFromFilteredFlowObjects(models, flowObjectWhere); // Combine conditions from flowObjects FlowIDs and flow conditions const mergedFlowConditions = { @@ -62,6 +64,25 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { return { flows, count: countObject.count }; } + private async getFlowIDsFromFilteredFlowObjects( + models: Database, + flowObjectWhere: any[] + ): Promise { + const flowIDsFromFilteredFlowObjects: FlowId[] = []; + const tempFlowIDs: FlowId[][] = await Promise.all( + flowObjectWhere.map((whereClause) => + this.flowObjectService.getFlowIdsFromFlowObjects(models, whereClause) + ) + ); + // Flatten array of arrays keeping only values present in all arrays + const flowIDs = tempFlowIDs.reduce((a, b) => + a.filter((c) => b.includes(c)) + ); + flowIDsFromFilteredFlowObjects.push(...flowIDs); + + return flowIDsFromFilteredFlowObjects; + } + /* * Map structure: * { @@ -74,12 +95,11 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { */ private mapFlowObjectConditionsToWhereClause( flowObjectConditions: Map> - ): any { - let flowObjectWhere: any = {}; + ): any[] { + const whereClauses: any = []; for (const [objectType, refDirectionMap] of flowObjectConditions) { for (const [refDirection, objectIDs] of refDirectionMap) { - flowObjectWhere = { - ...flowObjectWhere, + const whereClause = { objectID: { [Op.IN]: objectIDs, }, @@ -90,9 +110,11 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { [Op.LIKE]: objectType, }, }; + + whereClauses.push(whereClause); } } - return flowObjectWhere; + return whereClauses; } } diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts index c4765253..d53648b4 100644 --- a/src/utils/graphql/pagination.ts +++ b/src/utils/graphql/pagination.ts @@ -1,6 +1,6 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; -import { ObjectType, Field, ArgsType } from 'type-graphql'; +import { ArgsType, Field, ObjectType } from 'type-graphql'; export type SortOrder = 'asc' | 'desc'; @@ -46,7 +46,7 @@ export function prepareConditionFromCursor( if (afterCursor || beforeCursor) { const isAscending = sortCondition.order === 'asc'; - const cursorValue = afterCursor || beforeCursor; + const cursorValue = afterCursor ?? 
beforeCursor; let op; if (isAscending) { From 276323453fe80b9e5a3ed56f38c667737d41fe0a Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 16 Nov 2023 13:03:49 +0100 Subject: [PATCH 28/67] Fix lint problems --- .../external-reference-service.ts | 16 ++++++++-------- .../flow-link/flow-link-service.ts | 14 +++++++------- .../flow-object/flow-object-service.ts | 4 ++-- src/domain-services/flow-object/model.ts | 4 ++-- src/domain-services/flows/model.ts | 4 ++-- .../flows/strategy/flow-search-strategy.ts | 4 ++-- .../impl/flow-object-conditions-strategy.ts | 2 +- .../impl/only-flow-conditions-strategy.ts | 6 +++--- .../report-details/report-detail-service.ts | 14 +++++++------- 9 files changed, 34 insertions(+), 34 deletions(-) diff --git a/src/domain-services/external-reference/external-reference-service.ts b/src/domain-services/external-reference/external-reference-service.ts index 5c016473..15afaa35 100644 --- a/src/domain-services/external-reference/external-reference-service.ts +++ b/src/domain-services/external-reference/external-reference-service.ts @@ -1,9 +1,9 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Service } from 'typedi'; -import { FlowExternalReference } from '../flows/graphql/types'; +import { type FlowExternalReference } from '../flows/graphql/types'; @Service() export class ExternalReferenceService { @@ -19,11 +19,11 @@ export class ExternalReferenceService { const externalReferencesMap = new Map(); - flowIDs.forEach((flowID) => { + for (const flowID of flowIDs) { externalReferencesMap.set(flowID, []); - }); + } - externalReferences.forEach((externalReference) => { + for (const externalReference of externalReferences) { const flowID = externalReference.flowID; const externalReferenceMapped = this.mapExternalReferenceToExternalReferenceFlows(externalReference); @@ -33,7 +33,7 @@ export class ExternalReferenceService { } externalReferencesMap.get(flowID).push(externalReferenceMapped); - }); + } return externalReferencesMap; } diff --git a/src/domain-services/flow-link/flow-link-service.ts b/src/domain-services/flow-link/flow-link-service.ts index 8252340c..4952c7a2 100644 --- a/src/domain-services/flow-link/flow-link-service.ts +++ b/src/domain-services/flow-link/flow-link-service.ts @@ -1,7 +1,7 @@ -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Database } from '@unocha/hpc-api-core/src/db/type'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; +import { type InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; import { Service } from 'typedi'; @Service() @@ -9,7 +9,7 @@ export class FlowLinkService { async getFlowLinksForFlows( flowIds: FlowId[], models: Database - ): Promise[]>> { + ): Promise>>> { const flowLinks = await models.flowLink.find({ where: { childID: { @@ -21,11 +21,11 @@ export class FlowLinkService { // Group flowLinks by flow ID for easy 
mapping const flowLinksMap = new Map< number, - InstanceOfModel[] + Array> >(); // Populate the map with flowLinks for each flow - flowLinks.forEach((flowLink) => { + for (const flowLink of flowLinks) { const flowId = flowLink.childID.valueOf(); if (!flowLinksMap.has(flowId)) { @@ -35,7 +35,7 @@ export class FlowLinkService { const flowLinksForFlow = flowLinksMap.get(flowId)!; flowLinksForFlow.push(flowLink); - }); + } return flowLinksMap; } diff --git a/src/domain-services/flow-object/flow-object-service.ts b/src/domain-services/flow-object/flow-object-service.ts index 049bd03d..aeb25120 100644 --- a/src/domain-services/flow-object/flow-object-service.ts +++ b/src/domain-services/flow-object/flow-object-service.ts @@ -1,5 +1,5 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; diff --git a/src/domain-services/flow-object/model.ts b/src/domain-services/flow-object/model.ts index 1d666288..e8c19d72 100644 --- a/src/domain-services/flow-object/model.ts +++ b/src/domain-services/flow-object/model.ts @@ -1,4 +1,4 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; -import { InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; export type FlowObject = InstanceOfModel; diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index b5d35436..3c425a7e 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -1,4 +1,4 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; -import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; export type FlowEntity = InstanceDataOfModel; diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index cd15f90e..ff11e0b5 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -1,5 +1,5 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; -import { FlowEntity } from '../model'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowEntity } from '../model'; export interface FlowSearchStrategyResponse { flows: FlowEntity[]; diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts index 81390da8..c0dbed8d 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts @@ -3,7 +3,7 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowObjectService } from '../../../flow-object/flow-object-service'; -import { type FlowService } from '../../flow-service'; +import { FlowService } from '../../flow-service'; import { type FlowSearchStrategy, 
type FlowSearchStrategyResponse, diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts index 2af8506b..d6557e9e 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts @@ -1,9 +1,9 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; +import { type Database } from '@unocha/hpc-api-core/src/db'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; import { - FlowSearchStrategy, - FlowSearchStrategyResponse, + type FlowSearchStrategy, + type FlowSearchStrategyResponse, } from '../flow-search-strategy'; @Service() diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index 70b8b015..9d0f898a 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -1,16 +1,16 @@ -import { Database } from '@unocha/hpc-api-core/src/db'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Service } from 'typedi'; -import { ReportDetail } from './graphql/types'; +import { type ReportDetail } from './graphql/types'; @Service() export class ReportDetailService { async getReportDetailsForFlows( flowIds: FlowId[], models: Database ): Promise> { - const reportDetails: InstanceDataOfModel[] = + const reportDetails: Array> = await models.reportDetail.find({ where: { flowID: { @@ -22,7 +22,7 @@ export class ReportDetailService { const reportDetailsMap = new Map(); - flowIds.forEach((flowId: FlowId) => { + for (const flowId of flowIds) { if (!reportDetailsMap.has(flowId)) { reportDetailsMap.set(flowId, []); } @@ -35,7 +35,7 @@ export class ReportDetailService { this.mapReportDetailsToFlowReportDetail(reportDetail); reportDetailsMap.get(flowId)?.push(reportDetailMapped); } - }); + } return reportDetailsMap; } From f8a7066d57fa659e5f676e74fea03ba5dadd9ce8 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 21 Nov 2023 12:26:11 +0100 Subject: [PATCH 29/67] Fix duplicated entries on lists List with no entries are returned as empty lists --- .../categories/category-service.ts | 5 ++- .../flows/flow-search-service.ts | 37 ++++++++++--------- src/domain-services/flows/graphql/types.ts | 30 +++++++-------- .../location/location-service.ts | 16 +++++--- .../organizations/organization-service.ts | 18 +++++---- src/domain-services/plans/plan-service.ts | 33 +++++++++++------ .../report-details/report-detail-service.ts | 13 ++++++- .../usage-years/usage-year-service.ts | 13 ++++--- 8 files changed, 100 insertions(+), 65 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index 20560423..c64c54b4 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -57,7 +57,10 @@ export class CategoryService { throw new Error(`Category with ID ${catRef.categoryID} does not exist`); } - 
categoriesPerFlow.push(this.mapCategoryToFlowCategory(category, catRef)); + if (!categoriesPerFlow.some((cat) => cat.id === category.id.valueOf())) { + const mappedCategory = this.mapCategoryToFlowCategory(category, catRef); + categoriesPerFlow.push(mappedCategory); + } } return categoriesMap; diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 71a60111..896128b5 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -147,7 +147,7 @@ export class FlowSearchService { const flowLink = flowLinksMap.get(flow.id) ?? []; const categories = categoriesMap.get(flow.id) ?? []; const organizations = organizationsMap.get(flow.id) ?? []; - const locations = [...(locationsMap.get(flow.id) ?? [])]; + const locations = locationsMap.get(flow.id) ?? []; const plans = plansMap.get(flow.id) ?? []; const usageYears = usageYearsMap.get(flow.id) ?? []; const externalReferences = externalReferencesMap.get(flow.id) ?? []; @@ -162,19 +162,21 @@ export class FlowSearchService { ); } - const childIDs: number[] = flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.childID.valueOf()) as number[]; - - const parentIDs: number[] = flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.parentID.valueOf()) as number[]; + const childIDs: number[] = + (flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.childID.valueOf()) as number[]) ?? []; + + const parentIDs: number[] = + (flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.parentID.valueOf()) as number[]) ?? []; return this.buildFlowDTO( flow, @@ -431,12 +433,11 @@ export class FlowSearchService { usageYears, childIDs, parentIDs, - origAmount: flow.origAmount ? flow.origAmount.toString() : '', - origCurrency: flow.origCurrency ? flow.origCurrency.toString() : '', + origAmount: flow.origAmount ? flow.origAmount.toString() : null, + origCurrency: flow.origCurrency ? flow.origCurrency.toString() : null, externalReferences, reportDetails, - parkedParentSource: - parkedParentSource.length > 0 ? 
parkedParentSource : null, + parkedParentSource, // Paged item field cursor: flow.id.valueOf(), }; diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 1307e47f..cfa313bb 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -61,41 +61,41 @@ export class BaseFlow extends BaseType { @ObjectType() export class Flow extends BaseFlow { - @Field(() => [Category], { nullable: true }) + @Field(() => [Category], { nullable: false }) categories: Category[]; - @Field(() => [Organization], { nullable: true }) + @Field(() => [Organization], { nullable: false }) organizations: Organization[]; - @Field(() => [BasePlan], { nullable: true }) + @Field(() => [BasePlan], { nullable: false }) plans: BasePlan[]; - @Field(() => [BaseLocation], { nullable: true }) + @Field(() => [BaseLocation], { nullable: false }) locations: BaseLocation[]; - @Field(() => [UsageYear], { nullable: true }) + @Field(() => [UsageYear], { nullable: false }) usageYears: UsageYear[]; - @Field(() => [Number], { nullable: true }) + @Field(() => [Number], { nullable: false }) childIDs: number[]; - @Field(() => [Number], { nullable: true }) + @Field(() => [Number], { nullable: false }) parentIDs: number[]; - @Field({ nullable: true }) - origAmount: string; + @Field(() => String, { nullable: true }) + origAmount: string | null; - @Field({ nullable: true }) - origCurrency: string; + @Field(() => String, { nullable: true }) + origCurrency: string | null; - @Field(() => [FlowExternalReference], { nullable: true }) + @Field(() => [FlowExternalReference], { nullable: false }) externalReferences: FlowExternalReference[]; - @Field(() => [ReportDetail], { nullable: true }) + @Field(() => [ReportDetail], { nullable: false }) reportDetails: ReportDetail[]; - @Field(() => [FlowParkedParentSource], { nullable: true }) - parkedParentSource: FlowParkedParentSource[] | null; + @Field(() => [FlowParkedParentSource], { nullable: false }) + parkedParentSource: FlowParkedParentSource[]; } @ObjectType() diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index eb83dbe0..cb6311da 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -33,7 +33,7 @@ export class LocationService { async getLocationsForFlows( locationsFO: Array>, models: Database - ): Promise>> { + ): Promise> { const locationObjectsIDs: LocationId[] = locationsFO.map((locFO) => createBrandedValue(locFO.objectID) ); @@ -47,20 +47,26 @@ export class LocationService { }, }); - const locationsMap = new Map>(); + const locationsMap = new Map(); for (const locFO of locationsFO) { const flowId = locFO.flowID; if (!locationsMap.has(flowId)) { - locationsMap.set(flowId, new Set()); + locationsMap.set(flowId, []); } const location = locations.find((loc) => loc.id === locFO.objectID); if (!location) { throw new Error(`Location with ID ${locFO.objectID} does not exist`); } - const locationMapped = this.mapLocationsToFlowLocations(location, locFO); - locationsMap.get(flowId)!.add(locationMapped); + const locationsPerFlow = locationsMap.get(flowId)!; + if (!locationsPerFlow.some((loc) => loc.id === location.id)) { + const locationMapped = this.mapLocationsToFlowLocations( + location, + locFO + ); + locationsPerFlow.push(locationMapped); + } } return locationsMap; } diff --git a/src/domain-services/organizations/organization-service.ts 
b/src/domain-services/organizations/organization-service.ts index 411531c4..2a73dae0 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -32,13 +32,17 @@ export class OrganizationService { ); } - const organizationMapped: Organization = - this.mapOrganizationsToOrganizationFlows( - organization, - orgFO.refDirection - ); - - organizationsMap.get(flowId)!.push(organizationMapped); + const organizationPerFlow = organizationsMap.get(flowId)!; + if ( + !organizationPerFlow.some((org) => org.id === organization.id.valueOf()) + ) { + const organizationMapped: Organization = + this.mapOrganizationsToOrganizationFlows( + organization, + orgFO.refDirection + ); + organizationsMap.get(flowId)!.push(organizationMapped); + } } return organizationsMap; diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index f4d98301..d7aaa617 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -74,24 +74,33 @@ export class PlanService { }, }); - const planFlowOobject = plansFO.find( + const planFlowObject = plansFO.find( (planFO) => planFO.objectID === plan.id ); - const flowId = planFlowOobject && planFlowOobject.flowID; + if (!planVersion.length) { + throw new Error(`Plan with ID ${plan.id} does not have a version`); + } - const planMapped = this.mapPlansToFlowPlans( - plan, - planVersion[0], - planFlowOobject?.refDirection ?? null - ); + if (!planFlowObject) { + throw new Error(`Plan with ID ${plan.id} does not have a flow object`); + } + + const flowId = planFlowObject && planFlowObject.flowID; + + if (!plansMap.has(flowId)) { + plansMap.set(flowId, []); + } - if (flowId) { - if (!plansMap.has(flowId)) { - plansMap.set(flowId, []); - } + const plansPerFlow = plansMap.get(flowId)!; - plansMap.get(flowId)!.push(planMapped); + if (!plansPerFlow.some((plan) => plan.id === plan.id)) { + const planMapped = this.mapPlansToFlowPlans( + plan, + planVersion[0], + planFlowObject?.refDirection ?? 
null + ); + plansPerFlow.push(planMapped); } } diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index 9d0f898a..b7ef5533 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -30,10 +30,19 @@ export class ReportDetailService { (report) => report && flowId === report?.flowID ); - if (reportDetail) { + if (!reportDetail) { + throw new Error(`Report detail with flow ID ${flowId} does not exist`); + } + + const reportDetailsPerFlow = reportDetailsMap.get(flowId)!; + if ( + !reportDetailsPerFlow.some( + (report) => report.id === reportDetail.id.valueOf() + ) + ) { const reportDetailMapped = this.mapReportDetailsToFlowReportDetail(reportDetail); - reportDetailsMap.get(flowId)?.push(reportDetailMapped); + reportDetailsPerFlow.push(reportDetailMapped); } } diff --git a/src/domain-services/usage-years/usage-year-service.ts b/src/domain-services/usage-years/usage-year-service.ts index dfb1e6f5..7a495460 100644 --- a/src/domain-services/usage-years/usage-year-service.ts +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -35,11 +35,14 @@ export class UsageYearService { `Usage year with ID ${usageYearFO.objectID} does not exist` ); } - const usageYearMapped = this.mapUsageYearsToFlowUsageYears( - usageYear, - usageYearFO.refDirection - ); - usageYearsMap.get(flowId)!.push(usageYearMapped); + const usageYearsPerFlow = usageYearsMap.get(flowId)!; + if (!usageYearsPerFlow.some((uYear) => uYear.year === usageYear.year)) { + const usageYearMapped = this.mapUsageYearsToFlowUsageYears( + usageYear, + usageYearFO.refDirection + ); + usageYearsPerFlow.push(usageYearMapped); + } } return usageYearsMap; From d638aa40ea07c6a0127305111f48e3488d9e568d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 21 Nov 2023 14:27:02 +0100 Subject: [PATCH 30/67] Add channel property to ReportDetails Fix date format Correct naming for method 'groupByFlowObjectType' --- .../flows/flow-search-service.ts | 20 ++++++++++++------- .../report-details/graphql/types.ts | 3 +++ .../report-details/report-detail-service.ts | 20 ++++++++++++++++++- .../usage-years/usage-year-service.ts | 4 ++-- 4 files changed, 37 insertions(+), 10 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 896128b5..238a8e8e 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -108,7 +108,7 @@ export class FlowSearchService { const plansFO: FlowObject[] = []; const usageYearsFO: FlowObject[] = []; - this.mapFlowObjects( + this.groupByFlowObjectType( flowObjects, organizationsFO, locationsFO, @@ -153,6 +153,12 @@ export class FlowSearchService { const externalReferences = externalReferencesMap.get(flow.id) ?? []; const reportDetails = reportDetailsMap.get(flow.id) ?? 
[]; + const reportDetailsWithChannel = + this.reportDetailService.addChannelToReportDetails( + reportDetails, + categories + ); + let parkedParentSource: FlowParkedParentSource[] = []; if (flow.activeStatus && flowLink.length > 0) { parkedParentSource = await this.getParketParents( @@ -188,7 +194,7 @@ export class FlowSearchService { childIDs, parentIDs, externalReferences, - reportDetails, + reportDetailsWithChannel, parkedParentSource ); }) @@ -317,12 +323,12 @@ export class FlowSearchService { return conditionsMap; } - private mapFlowObjects( + private groupByFlowObjectType( flowObjects: FlowObject[], - organizationsFO: any[], - locationsFO: any[], - plansFO: any[], - usageYearsFO: any[] + organizationsFO: FlowObject[], + locationsFO: FlowObject[], + plansFO: FlowObject[], + usageYearsFO: FlowObject[] ) { for (const flowObject of flowObjects) { if (flowObject.objectType === 'organization') { diff --git a/src/domain-services/report-details/graphql/types.ts b/src/domain-services/report-details/graphql/types.ts index 3d4ddfa9..175f3c73 100644 --- a/src/domain-services/report-details/graphql/types.ts +++ b/src/domain-services/report-details/graphql/types.ts @@ -32,4 +32,7 @@ export class ReportDetail extends BaseType { @Field(() => Number, { nullable: true }) organizationID: number | null; + + @Field(() => String, { nullable: true }) + channel: string | null; } diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index b7ef5533..e702ddde 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -3,6 +3,7 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Service } from 'typedi'; +import { type Category } from '../categories/graphql/types'; import { type ReportDetail } from './graphql/types'; @Service() export class ReportDetailService { @@ -58,13 +59,30 @@ export class ReportDetailService { versionID: reportDetail.versionID, contactInfo: reportDetail.contactInfo, source: reportDetail.source, - date: reportDetail.date, + date: reportDetail.date + ? 
new Date(reportDetail.date).toISOString() + : null, sourceID: reportDetail.sourceID, refCode: reportDetail.refCode, verified: reportDetail.verified, createdAt: reportDetail.createdAt.toISOString(), updatedAt: reportDetail.updatedAt.toISOString(), organizationID: reportDetail.organizationID, + channel: null, }; } + + addChannelToReportDetails( + reportDetails: ReportDetail[], + categories: Category[] + ) { + for (const reportDetail of reportDetails) { + const category = categories.find((cat) => cat.group === 'reportChannel'); + + if (category) { + reportDetail.channel = category.name; + } + } + return reportDetails; + } } diff --git a/src/domain-services/usage-years/usage-year-service.ts b/src/domain-services/usage-years/usage-year-service.ts index 7a495460..d6463108 100644 --- a/src/domain-services/usage-years/usage-year-service.ts +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -52,8 +52,8 @@ export class UsageYearService { return { year: usageYear.year, direction: refDirection, - createdAt: usageYear.createdAt, - updatedAt: usageYear.updatedAt, + createdAt: usageYear.createdAt.toISOString(), + updatedAt: usageYear.updatedAt.toISOString(), }; } } From cfba15325d1d0c4e4346d7050ac13f969703c59a Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 23 Nov 2023 10:11:00 +0100 Subject: [PATCH 31/67] Add totalAmountUDS resolver --- .../flows/flow-search-service.ts | 34 +++++++++++++++++-- src/domain-services/flows/flow-service.ts | 4 +-- src/domain-services/flows/graphql/args.ts | 15 ++++++++ src/domain-services/flows/graphql/resolver.ts | 17 ++++++++-- src/domain-services/flows/graphql/types.ts | 9 +++++ .../flows/strategy/flow-search-strategy.ts | 8 ++--- .../impl/flow-object-conditions-strategy.ts | 8 ++--- .../impl/only-flow-conditions-strategy.ts | 8 ++--- 8 files changed, 85 insertions(+), 18 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 238a8e8e..07088c9e 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -21,12 +21,14 @@ import { UsageYearService } from '../usage-years/usage-year-service'; import { type FlowObjectFilters, type SearchFlowsArgs, + type SearchFlowsArgsNonPaginated, type SearchFlowsFilters, } from './graphql/args'; import { type FlowPaged, type FlowParkedParentSource, type FlowSearchResult, + type FlowSearchTotalAmountResult, } from './graphql/types'; import { type FlowEntity } from './model'; import { type FlowSearchStrategy } from './strategy/flow-search-strategy'; @@ -79,10 +81,10 @@ export class FlowSearchService { // Obtain flows and its count based on the strategy selected const { flows, count } = await strategy.search( conditions, + models, orderBy, limitComputed, - cursorCondition, - models + cursorCondition ); // Remove the extra item used to check hasNextPage @@ -448,4 +450,32 @@ export class FlowSearchService { cursor: flow.id.valueOf(), }; } + + async searchTotalAmount( + models: Database, + args: SearchFlowsArgsNonPaginated + ): Promise { + const { flowFilters, flowObjectFilters } = args; + + const { strategy, conditions } = this.determineStrategy( + flowFilters, + flowObjectFilters + ); + + const { flows, count } = await strategy.search(conditions, models); + + const flowsAmountUSD: Array = flows.map( + (flow) => flow.amountUSD + ); + + const totalAmount = flowsAmountUSD.reduce((a, b) => +a + +b, 0); + + return { + totalAmountUSD: totalAmount.toLocaleString(undefined, { + minimumFractionDigits: 
2, + maximumFractionDigits: 2, + }), + flowsCount: count, + }; + } } diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 4920eecd..353a8104 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -8,8 +8,8 @@ export class FlowService { async getFlows( models: Database, conditions: any, - orderBy: any, - limit: number + orderBy?: any, + limit?: number ) { return await models.flow.find({ orderBy, diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 7996e7b0..e5ee56a9 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -72,3 +72,18 @@ export class SearchFlowsArgs extends PaginationArgs { @Field({ nullable: true }) includeChildrenOfParkedFlows: boolean; } + +@ArgsType() +export class SearchFlowsArgsNonPaginated { + @Field(() => SearchFlowsFilters, { nullable: true }) + flowFilters: SearchFlowsFilters; + + @Field(() => [FlowObjectFilters], { nullable: true }) + flowObjectFilters: FlowObjectFilters[]; + + @Field(() => [FlowCategory], { nullable: true }) + categoryFilters: FlowCategory[]; + + @Field({ nullable: true }) + includeChildrenOfParkedFlows: boolean; +} diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 2852b580..ad49913d 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -2,8 +2,12 @@ import { Args, Ctx, Query, Resolver } from 'type-graphql'; import { Service } from 'typedi'; import Context from '../../Context'; import { FlowSearchService } from '../flow-search-service'; -import { SearchFlowsArgs } from './args'; -import { FlowPaged, FlowSearchResult } from './types'; +import { SearchFlowsArgs, SearchFlowsArgsNonPaginated } from './args'; +import { + FlowPaged, + FlowSearchResult, + FlowSearchTotalAmountResult, +} from './types'; @Service() @Resolver(FlowPaged) @@ -18,4 +22,13 @@ export default class FlowResolver { ): Promise { return await this.flowSearchService.search(context.models, args); } + + @Query(() => FlowSearchTotalAmountResult) + async searchFlowsTotalAmountUSD( + @Ctx() context: Context, + @Args(() => SearchFlowsArgsNonPaginated, { validate: false }) + args: SearchFlowsArgsNonPaginated + ): Promise { + return await this.flowSearchService.searchTotalAmount(context.models, args); + } } diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index cfa313bb..ec865f31 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -110,6 +110,15 @@ export class FlowSearchResult extends PageInfo { flows: FlowPaged[]; } +@ObjectType() +export class FlowSearchTotalAmountResult { + @Field(() => String, { nullable: false }) + totalAmountUSD: string; + + @Field(() => Number, { nullable: false }) + flowsCount: number; +} + export type FlowSortField = | 'id' | 'versionID' diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index ff11e0b5..2d2d4015 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -9,9 +9,9 @@ export interface FlowSearchStrategyResponse { export interface FlowSearchStrategy { search( flowConditions: Map, - orderBy: any, - limit: number, - cursorCondition: any, - models: Database + models: 
Database, + orderBy?: any, + limit?: number, + cursorCondition?: any ): Promise; } diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts index c0dbed8d..47bacfcc 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts @@ -18,10 +18,10 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { async search( flowConditions: Map, - orderBy: any, - limit: number, - cursorCondition: any, - models: Database + models: Database, + orderBy?: any, + limit?: number, + cursorCondition?: any ): Promise { // Obtain flowObjects conditions const flowObjectsConditions: Map< diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts index d6557e9e..01b7a21c 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts @@ -12,10 +12,10 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { async search( flowConditions: any, - orderBy: any, - limit: number, - cursorCondition: any, - models: Database + models: Database, + orderBy?: any, + limit?: number, + cursorCondition?: any ): Promise { // Build conditions object const conditions: any = { ...cursorCondition, ...flowConditions }; From c751f4c736c7c2cd6a5c309e4af8916caa19688f Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 23 Nov 2023 12:10:11 +0100 Subject: [PATCH 32/67] Fix sorting condition --- .../flows/flow-search-service.ts | 96 +++++++++++++++---- src/domain-services/flows/graphql/types.ts | 13 ++- .../impl/flow-object-conditions-strategy.ts | 35 +++++-- .../impl/only-flow-conditions-strategy.ts | 7 +- src/utils/graphql/pagination.ts | 10 +- 5 files changed, 121 insertions(+), 40 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 07088c9e..94fc9e3d 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,7 +1,6 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { CategoryService } from '../categories/category-service'; import { type Category } from '../categories/graphql/types'; @@ -29,6 +28,7 @@ import { type FlowParkedParentSource, type FlowSearchResult, type FlowSearchTotalAmountResult, + type FlowSortField, } from './graphql/types'; import { type FlowEntity } from './model'; import { type FlowSearchStrategy } from './strategy/flow-search-strategy'; @@ -57,7 +57,9 @@ export class FlowSearchService { ): Promise { const { limit, afterCursor, beforeCursor, sortField, sortOrder } = filters; - const orderBy = { + const orderBy: + | { column: FlowSortField; order: 'asc' | 'desc' } + | Array<{ column: FlowSortField; order: 'asc' | 'desc' }> = { column: sortField ?? 'updatedAt', order: sortOrder ?? 
'desc', }; @@ -66,7 +68,8 @@ export class FlowSearchService { const cursorCondition = this.buildCursorCondition( beforeCursor, - afterCursor + afterCursor, + orderBy ); // Determine strategy of how to search for flows @@ -197,7 +200,8 @@ export class FlowSearchService { parentIDs, externalReferences, reportDetailsWithChannel, - parkedParentSource + parkedParentSource, + sortField ); }) ); @@ -206,11 +210,11 @@ export class FlowSearchService { flows: items, hasNextPage: limit <= flows.length, hasPreviousPage: afterCursor !== undefined, - startCursor: flows.length ? flows[0].id.valueOf() : 0, - endCursor: flows.length ? flows.at(-1)?.id.valueOf() ?? 0 : 0, + startCursor: flows.length ? items[0].cursor : '', + endCursor: flows.length ? items.at(-1)?.cursor ?? '' : '', pageSize: flows.length, - sortField: orderBy.column, - sortOrder: orderBy.order, + sortField: sortField ?? 'updatedAt', + sortOrder: sortOrder ?? 'desc', total: count, }; } @@ -388,22 +392,61 @@ export class FlowSearchService { return parentFlows; } - private buildCursorCondition(beforeCursor: number, afterCursor: number) { + private buildCursorCondition( + beforeCursor: string, + afterCursor: string, + orderBy: + | { column: FlowSortField; order: 'asc' | 'desc' } + | Array<{ column: FlowSortField; order: 'asc' | 'desc' }> + ) { if (beforeCursor && afterCursor) { throw new Error('Cannot use before and after cursor at the same time'); } + if (!beforeCursor && !afterCursor) { + return {}; + } + + if (Array.isArray(orderBy)) { + // Build iterations of cursor conditions + const cursorConditions = orderBy.map((orderBy) => { + return this.buildCursorConditionForSingleOrderBy( + beforeCursor, + afterCursor, + orderBy + ); + }); + + // Combine cursor conditions + return { [Cond.AND]: cursorConditions }; + } + + return this.buildCursorConditionForSingleOrderBy( + beforeCursor, + afterCursor, + orderBy + ); + } + + private buildCursorConditionForSingleOrderBy( + beforeCursor: string, + afterCursor: string, + orderBy: { column: FlowSortField; order: 'asc' | 'desc' } + ) { let cursorCondition; - if (afterCursor) { - cursorCondition = { - id: { - [Op.GT]: createBrandedValue(afterCursor), - }, - }; - } else if (beforeCursor) { + + const comparisonOperator = + (afterCursor && orderBy.order === 'asc') || + (beforeCursor && orderBy.order === 'desc') + ? Op.GT + : Op.LT; + + const cursorValue = afterCursor || beforeCursor; + + if (cursorValue) { cursorCondition = { - id: { - [Op.LT]: createBrandedValue(beforeCursor), + [orderBy.column]: { + [comparisonOperator]: cursorValue, }, }; } @@ -422,8 +465,19 @@ export class FlowSearchService { parentIDs: number[], externalReferences: any[], reportDetails: any[], - parkedParentSource: FlowParkedParentSource[] + parkedParentSource: FlowParkedParentSource[], + sortColumn?: FlowSortField ): FlowPaged { + let cursor: string | number | Date = sortColumn + ? flow.id.valueOf() + : flow[sortColumn ?? 
'updatedAt']; + + if (cursor instanceof Date) { + cursor = cursor.toISOString(); + } else if (typeof cursor === 'number') { + cursor = cursor.toString(); + } + return { // Mandatory fields id: flow.id.valueOf(), @@ -447,7 +501,7 @@ export class FlowSearchService { reportDetails, parkedParentSource, // Paged item field - cursor: flow.id.valueOf(), + cursor, }; } diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index ec865f31..7371ce73 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -100,8 +100,8 @@ export class Flow extends BaseFlow { @ObjectType() export class FlowPaged extends Flow implements IItemPaged { - @Field({ nullable: false }) - cursor: number; + @Field(() => String, { nullable: false }) + cursor: string; } @ObjectType() @@ -110,6 +110,15 @@ export class FlowSearchResult extends PageInfo { flows: FlowPaged[]; } +@ObjectType() +export class FlowSearchResultNonPaginated { + @Field(() => [FlowPaged], { nullable: false }) + flows: FlowPaged[]; + + @Field(() => Number, { nullable: false }) + flowsCount: number; +} + @ObjectType() export class FlowSearchTotalAmountResult { @Field(() => String, { nullable: false }) diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts index 47bacfcc..51d16337 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts @@ -1,6 +1,6 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowObjectService } from '../../../flow-object/flow-object-service'; import { FlowService } from '../../flow-service'; @@ -42,20 +42,35 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { await this.getFlowIDsFromFilteredFlowObjects(models, flowObjectWhere); // Combine conditions from flowObjects FlowIDs and flow conditions - const mergedFlowConditions = { - ...flowEntityConditions, - id: { - [Op.IN]: flowIDsFromFilteredFlowObjects, - }, + const countConditions = { + [Cond.AND]: [ + flowEntityConditions ?? {}, + { + id: { + [Op.IN]: flowIDsFromFilteredFlowObjects, + }, + }, + ], }; - const conditions = { ...cursorCondition, ...mergedFlowConditions }; + // Combine cursor condition with flow conditions + const searchConditions = { + [Cond.AND]: [ + flowEntityConditions ?? {}, + cursorCondition ?? 
{}, + { + id: { + [Op.IN]: flowIDsFromFilteredFlowObjects, + }, + }, + ], + }; // Obtain flows and flowCount based on flowIDs from filtered flowObjects // and flow conditions const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, conditions, orderBy, limit), - this.flowService.getFlowsCount(models, mergedFlowConditions), + this.flowService.getFlows(models, searchConditions, orderBy, limit), + this.flowService.getFlowsCount(models, countConditions), ]); // Map count result query to count object @@ -80,7 +95,7 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { ); flowIDsFromFilteredFlowObjects.push(...flowIDs); - return flowIDsFromFilteredFlowObjects; + return flowIDsFromFilteredFlowObjects.sort(); } /* diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts index 01b7a21c..f21793c0 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts @@ -1,4 +1,5 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; +import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; import { @@ -18,10 +19,12 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { cursorCondition?: any ): Promise { // Build conditions object - const conditions: any = { ...cursorCondition, ...flowConditions }; + const searchConditions = { + [Cond.AND]: [flowConditions ?? {}, cursorCondition ?? {}], + }; const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, conditions, orderBy, limit), + this.flowService.getFlows(models, searchConditions, orderBy, limit), this.flowService.getFlowsCount(models, flowConditions), ]); diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts index d53648b4..b0fc2d54 100644 --- a/src/utils/graphql/pagination.ts +++ b/src/utils/graphql/pagination.ts @@ -5,7 +5,7 @@ import { ArgsType, Field, ObjectType } from 'type-graphql'; export type SortOrder = 'asc' | 'desc'; export interface IItemPaged { - cursor: number; + cursor: string; } @ObjectType() @@ -17,10 +17,10 @@ export class PageInfo { hasPreviousPage: boolean; @Field({ nullable: false }) - startCursor: number; + startCursor: string; @Field({ nullable: false }) - endCursor: number; + endCursor: string; @Field({ nullable: false }) pageSize: number; @@ -71,10 +71,10 @@ export class PaginationArgs { limit: number; @Field({ nullable: true }) - afterCursor: number; + afterCursor: string; @Field({ nullable: true }) - beforeCursor: number; + beforeCursor: string; @Field(() => String, { nullable: true }) sortField: TSortFields; From 3c4836feed02e77d5c2ac5229ac8901eaa50c6f7 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 28 Nov 2023 08:03:19 +0100 Subject: [PATCH 33/67] Add resolver to fetch all flows without pages --- .../flows/flow-search-service.ts | 36 +++++++++++++++++++ src/domain-services/flows/graphql/resolver.ts | 13 +++++++ 2 files changed, 49 insertions(+) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 94fc9e3d..245f33dc 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -27,6 +27,7 @@ import { type FlowPaged, type FlowParkedParentSource, type FlowSearchResult, + type 
FlowSearchResultNonPaginated, type FlowSearchTotalAmountResult, type FlowSortField, } from './graphql/types'; @@ -532,4 +533,39 @@ export class FlowSearchService { flowsCount: count, }; } + + async searchBatches( + models: Database, + args: SearchFlowsArgs + ): Promise { + const flowSearchResponse = await this.search(models, args); + + const batchesMissing = + Math.round(flowSearchResponse.total / args.limit) - 1; + const flows: FlowPaged[] = flowSearchResponse.flows; + + let hasNextPage = flowSearchResponse.hasNextPage; + let batchCount = 1; + + let cursor = flowSearchResponse.endCursor; + let nextArgs: SearchFlowsArgs = { ...args, afterCursor: cursor }; + + let nextFlowSearchResponse: FlowSearchResult; + while (hasNextPage) { + batchCount++; + + nextFlowSearchResponse = await this.search(models, nextArgs); + + flows.push(...nextFlowSearchResponse.flows); + + hasNextPage = + nextFlowSearchResponse.hasNextPage && batchCount <= batchesMissing; + + cursor = nextFlowSearchResponse.endCursor; + // Update the cursor for the next iteration + nextArgs = { ...args, afterCursor: cursor }; + } + + return { flows, flowsCount: flows.length }; + } } diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index ad49913d..00091d92 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -6,6 +6,7 @@ import { SearchFlowsArgs, SearchFlowsArgsNonPaginated } from './args'; import { FlowPaged, FlowSearchResult, + FlowSearchResultNonPaginated, FlowSearchTotalAmountResult, } from './types'; @@ -31,4 +32,16 @@ export default class FlowResolver { ): Promise { return await this.flowSearchService.searchTotalAmount(context.models, args); } + + @Query(() => FlowSearchResultNonPaginated) + async searchFlowsBatches( + @Ctx() context: Context, + @Args(() => SearchFlowsArgs, { validate: false }) + args: SearchFlowsArgs + ): Promise { + // Set default batch size to 1000 + args.limit = args.limit > 0 ? 
args.limit : 1000; + + return await this.flowSearchService.searchBatches(context.models, args); + } } From 0d3b751f9a2ba9349070bb9ac0fc2ea5b242b9e0 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 4 Dec 2023 11:53:56 +0100 Subject: [PATCH 34/67] Fix: use correct notation for propertyID Rename reporterRefCode --- src/domain-services/flows/graphql/args.ts | 6 +++--- tests/unit/flow-search-service.spec.ts | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index e5ee56a9..1c922c6d 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -20,13 +20,13 @@ export class SearchFlowsFilters { amountUSD: number; @Field({ nullable: true }) - reporterReferenceCode: number; + reporterRefCode: number; @Field({ nullable: true }) - sourceSystemId: number; + sourceSystemID: number; @Field({ nullable: true }) - legacyId: number; + legacyID: number; } @InputType() diff --git a/tests/unit/flow-search-service.spec.ts b/tests/unit/flow-search-service.spec.ts index 7797d044..305a2d2b 100644 --- a/tests/unit/flow-search-service.spec.ts +++ b/tests/unit/flow-search-service.spec.ts @@ -20,9 +20,9 @@ describe('FlowSearchService', () => { flowFilters.status = 'commitment'; flowFilters.type = 'carryover'; flowFilters.amountUSD = 1000; - flowFilters.reporterReferenceCode = 123; - flowFilters.sourceSystemId = 456; - flowFilters.legacyId = 789; + flowFilters.reporterRefCode = 123; + flowFilters.sourceSystemID = 456; + flowFilters.legacyID = 789; const result = flowSearchService.prepareFlowConditions(flowFilters); @@ -32,7 +32,7 @@ describe('FlowSearchService', () => { status: 'commitment', type: 'carryover', amountUSD: 1000, - reporterReferenceCode: 123, + reporterRefCode: 123, sourceSystemId: 456, legacyId: 789, }); From d2600f5783431d8c9e40b69dc90482feeefc4639 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 4 Dec 2023 13:29:19 +0100 Subject: [PATCH 35/67] Remove throws to avoid breaking the process Remove unneeded variables from 'searchFlowsBatches' --- .../categories/category-service.ts | 9 ++--- .../flows/flow-search-service.ts | 10 +---- src/domain-services/flows/graphql/resolver.ts | 1 - .../location/location-service.ts | 19 +++++---- .../organizations/organization-service.ts | 30 +++++++------- src/domain-services/plans/plan-service.ts | 40 ++++++++----------- .../report-details/report-detail-service.ts | 23 +++++------ .../usage-years/usage-year-service.ts | 21 +++++----- 8 files changed, 66 insertions(+), 87 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index c64c54b4..c7fe5131 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -53,11 +53,10 @@ export class CategoryService { const category = categories.find((cat) => cat.id === catRef.categoryID); - if (!category) { - throw new Error(`Category with ID ${catRef.categoryID} does not exist`); - } - - if (!categoriesPerFlow.some((cat) => cat.id === category.id.valueOf())) { + if ( + category && + !categoriesPerFlow.some((cat) => cat.id === category.id.valueOf()) + ) { const mappedCategory = this.mapCategoryToFlowCategory(category, catRef); categoriesPerFlow.push(mappedCategory); } diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 245f33dc..ca0ee5db 100644 --- 
a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -540,28 +540,22 @@ export class FlowSearchService { ): Promise { const flowSearchResponse = await this.search(models, args); - const batchesMissing = - Math.round(flowSearchResponse.total / args.limit) - 1; const flows: FlowPaged[] = flowSearchResponse.flows; let hasNextPage = flowSearchResponse.hasNextPage; - let batchCount = 1; let cursor = flowSearchResponse.endCursor; let nextArgs: SearchFlowsArgs = { ...args, afterCursor: cursor }; let nextFlowSearchResponse: FlowSearchResult; while (hasNextPage) { - batchCount++; - nextFlowSearchResponse = await this.search(models, nextArgs); flows.push(...nextFlowSearchResponse.flows); - hasNextPage = - nextFlowSearchResponse.hasNextPage && batchCount <= batchesMissing; - + hasNextPage = nextFlowSearchResponse.hasNextPage; cursor = nextFlowSearchResponse.endCursor; + // Update the cursor for the next iteration nextArgs = { ...args, afterCursor: cursor }; } diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 00091d92..1e149518 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -41,7 +41,6 @@ export default class FlowResolver { ): Promise { // Set default batch size to 1000 args.limit = args.limit > 0 ? args.limit : 1000; - return await this.flowSearchService.searchBatches(context.models, args); } } diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index cb6311da..04274ee6 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -56,16 +56,15 @@ export class LocationService { } const location = locations.find((loc) => loc.id === locFO.objectID); - if (!location) { - throw new Error(`Location with ID ${locFO.objectID} does not exist`); - } - const locationsPerFlow = locationsMap.get(flowId)!; - if (!locationsPerFlow.some((loc) => loc.id === location.id)) { - const locationMapped = this.mapLocationsToFlowLocations( - location, - locFO - ); - locationsPerFlow.push(locationMapped); + if (location) { + const locationsPerFlow = locationsMap.get(flowId)!; + if (!locationsPerFlow.some((loc) => loc.id === location.id)) { + const locationMapped = this.mapLocationsToFlowLocations( + location, + locFO + ); + locationsPerFlow.push(locationMapped); + } } } return locationsMap; diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index 2a73dae0..8a788578 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -26,22 +26,20 @@ export class OrganizationService { (org) => org.id === orgFO.objectID ); - if (!organization) { - throw new Error( - `Organization with ID ${orgFO.objectID} does not exist` - ); - } - - const organizationPerFlow = organizationsMap.get(flowId)!; - if ( - !organizationPerFlow.some((org) => org.id === organization.id.valueOf()) - ) { - const organizationMapped: Organization = - this.mapOrganizationsToOrganizationFlows( - organization, - orgFO.refDirection - ); - organizationsMap.get(flowId)!.push(organizationMapped); + if (organization) { + const organizationPerFlow = organizationsMap.get(flowId)!; + if ( + !organizationPerFlow.some( + (org) => org.id === organization.id.valueOf() + ) + ) { + const organizationMapped: Organization = + 
this.mapOrganizationsToOrganizationFlows( + organization, + orgFO.refDirection + ); + organizationsMap.get(flowId)!.push(organizationMapped); + } } } diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index d7aaa617..3796566e 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -78,29 +78,23 @@ export class PlanService { (planFO) => planFO.objectID === plan.id ); - if (!planVersion.length) { - throw new Error(`Plan with ID ${plan.id} does not have a version`); - } - - if (!planFlowObject) { - throw new Error(`Plan with ID ${plan.id} does not have a flow object`); - } - - const flowId = planFlowObject && planFlowObject.flowID; - - if (!plansMap.has(flowId)) { - plansMap.set(flowId, []); - } - - const plansPerFlow = plansMap.get(flowId)!; - - if (!plansPerFlow.some((plan) => plan.id === plan.id)) { - const planMapped = this.mapPlansToFlowPlans( - plan, - planVersion[0], - planFlowObject?.refDirection ?? null - ); - plansPerFlow.push(planMapped); + if (planVersion.length && planFlowObject) { + const flowId = planFlowObject && planFlowObject.flowID; + + if (!plansMap.has(flowId)) { + plansMap.set(flowId, []); + } + + const plansPerFlow = plansMap.get(flowId)!; + + if (!plansPerFlow.some((plan) => plan.id === plan.id)) { + const planMapped = this.mapPlansToFlowPlans( + plan, + planVersion[0], + planFlowObject?.refDirection ?? null + ); + plansPerFlow.push(planMapped); + } } } diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index e702ddde..6de2df9d 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -31,19 +31,18 @@ export class ReportDetailService { (report) => report && flowId === report?.flowID ); - if (!reportDetail) { - throw new Error(`Report detail with flow ID ${flowId} does not exist`); - } + if (reportDetail) { + const reportDetailsPerFlow = reportDetailsMap.get(flowId)!; - const reportDetailsPerFlow = reportDetailsMap.get(flowId)!; - if ( - !reportDetailsPerFlow.some( - (report) => report.id === reportDetail.id.valueOf() - ) - ) { - const reportDetailMapped = - this.mapReportDetailsToFlowReportDetail(reportDetail); - reportDetailsPerFlow.push(reportDetailMapped); + if ( + !reportDetailsPerFlow.some( + (report) => report.id === reportDetail.id.valueOf() + ) + ) { + const reportDetailMapped = + this.mapReportDetailsToFlowReportDetail(reportDetail); + reportDetailsPerFlow.push(reportDetailMapped); + } } } diff --git a/src/domain-services/usage-years/usage-year-service.ts b/src/domain-services/usage-years/usage-year-service.ts index d6463108..e903f4d2 100644 --- a/src/domain-services/usage-years/usage-year-service.ts +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -30,18 +30,15 @@ export class UsageYearService { (uYear) => uYear.id === usageYearFO.objectID ); - if (!usageYear) { - throw new Error( - `Usage year with ID ${usageYearFO.objectID} does not exist` - ); - } - const usageYearsPerFlow = usageYearsMap.get(flowId)!; - if (!usageYearsPerFlow.some((uYear) => uYear.year === usageYear.year)) { - const usageYearMapped = this.mapUsageYearsToFlowUsageYears( - usageYear, - usageYearFO.refDirection - ); - usageYearsPerFlow.push(usageYearMapped); + if (usageYear) { + const usageYearsPerFlow = usageYearsMap.get(flowId)!; + if (!usageYearsPerFlow.some((uYear) => uYear.year === usageYear.year)) { + const 
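The services touched by this patch all follow the same shape: build a per-flow map, resolve each reference against the pre-fetched entities, and quietly skip references that do not resolve instead of throwing. A minimal, self-contained sketch of that guard-and-group pattern, with hypothetical OrgRef/Org shapes standing in for the real models:

// Sketch of the guard-and-group pattern used across these services:
// unresolved references are skipped instead of throwing, so a single bad
// row cannot abort the whole mapping. Types here are illustrative only.
interface OrgRef { flowID: number; objectID: number; refDirection: string; }
interface Org { id: number; name: string; }

function groupOrgsByFlow(refs: OrgRef[], orgs: Org[]): Map<number, Org[]> {
  const byFlow = new Map<number, Org[]>();

  for (const ref of refs) {
    if (!byFlow.has(ref.flowID)) {
      byFlow.set(ref.flowID, []);
    }

    const org = orgs.find((o) => o.id === ref.objectID);

    // Guard instead of `throw new Error(...)`: ignore references that do not
    // resolve to a loaded organization, and avoid duplicates per flow.
    if (org && !byFlow.get(ref.flowID)!.some((o) => o.id === org.id)) {
      byFlow.get(ref.flowID)!.push(org);
    }
  }

  return byFlow;
}

// Example: the reference pointing at the missing org 99 is silently ignored.
const grouped = groupOrgsByFlow(
  [
    { flowID: 1, objectID: 10, refDirection: 'source' },
    { flowID: 1, objectID: 99, refDirection: 'destination' },
  ],
  [{ id: 10, name: 'OCHA' }]
);
console.log(grouped.get(1)); // [{ id: 10, name: 'OCHA' }]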
usageYearMapped = this.mapUsageYearsToFlowUsageYears( + usageYear, + usageYearFO.refDirection + ); + usageYearsPerFlow.push(usageYearMapped); + } } } From a91467540dfcd6e8574b32070bf73a1000116185 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 5 Dec 2023 10:57:27 +0100 Subject: [PATCH 36/67] Temp: merge w 0d3b751f9a2ba9349070bb9ac0fc2ea5b242b9e0 --- src/domain-services/flows/graphql/args.ts | 12 ++++++------ tests/unit/flow-search-service.spec.ts | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 1c922c6d..fab20a24 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -19,14 +19,14 @@ export class SearchFlowsFilters { @Field({ nullable: true }) amountUSD: number; - @Field({ nullable: true }) - reporterRefCode: number; + @Field({ name: 'reporterRefCode', nullable: true }) + reporterReferenceCode: number; - @Field({ nullable: true }) - sourceSystemID: number; + @Field({ name: 'sourceSystemID', nullable: true }) + sourceSystemId: number; - @Field({ nullable: true }) - legacyID: number; + @Field({ name: 'legacyID', nullable: true }) + legacyId: number; } @InputType() diff --git a/tests/unit/flow-search-service.spec.ts b/tests/unit/flow-search-service.spec.ts index 305a2d2b..a12e1fa6 100644 --- a/tests/unit/flow-search-service.spec.ts +++ b/tests/unit/flow-search-service.spec.ts @@ -20,9 +20,9 @@ describe('FlowSearchService', () => { flowFilters.status = 'commitment'; flowFilters.type = 'carryover'; flowFilters.amountUSD = 1000; - flowFilters.reporterRefCode = 123; - flowFilters.sourceSystemID = 456; - flowFilters.legacyID = 789; + flowFilters.reporterReferenceCode = 123; + flowFilters.sourceSystemId = 456; + flowFilters.legacyId = 789; const result = flowSearchService.prepareFlowConditions(flowFilters); From 9bd47390a046df4109ae0d9393fd87c312e17c3d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 11 Dec 2023 09:43:09 +0100 Subject: [PATCH 37/67] Temp --- .../categories/category-service.ts | 32 ++-- src/domain-services/categories/model.ts | 27 ++++ .../flows/flow-search-service.ts | 98 +++++++----- src/domain-services/flows/graphql/args.ts | 28 +++- .../flows/strategy/flow-search-strategy.ts | 4 +- .../flows/strategy/flowID-search-strategy.ts | 17 +++ .../flow-object-conditions-strategy-impl.ts | 134 ++++++++++++++++ .../impl/flow-object-conditions-strategy.ts | 135 ---------------- ...-flow-category-conditions-strategy-impl.ts | 67 ++++++++ ...Ids-flow-mixed-conditions-strategy-impl.ts | 57 +++++++ ...ds-flow-object-conditions-strategy-impl.ts | 47 ++++++ ... 
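The rename above keeps the external GraphQL schema names (reporterRefCode, sourceSystemID, legacyID) stable while the TypeScript properties keep the casing the rest of the codebase expects, by passing the name option to type-graphql's @Field decorator. A minimal sketch of that aliasing, assuming the project's usual decorator setup (reflect-metadata and relaxed property initialization):

import 'reflect-metadata';
import { Field, InputType } from 'type-graphql';

// The schema exposes the legacy field names while the class properties stay
// camelCase; only the @Field name option differs between the two.
@InputType()
class ExampleFlowFilters {
  @Field({ name: 'reporterRefCode', nullable: true })
  reporterReferenceCode: number;

  @Field({ name: 'sourceSystemID', nullable: true })
  sourceSystemId: number;

  @Field({ name: 'legacyID', nullable: true })
  legacyId: number;
}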
=> only-flow-conditions-strategy-impl.ts} | 0 .../flows/strategy/impl/utils.ts | 144 ++++++++++++++++++ src/utils/graphql/pagination.ts | 8 +- 14 files changed, 598 insertions(+), 200 deletions(-) create mode 100644 src/domain-services/categories/model.ts create mode 100644 src/domain-services/flows/strategy/flowID-search-strategy.ts create mode 100644 src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts delete mode 100644 src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts create mode 100644 src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts create mode 100644 src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts create mode 100644 src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts rename src/domain-services/flows/strategy/impl/{only-flow-conditions-strategy.ts => only-flow-conditions-strategy-impl.ts} (100%) create mode 100644 src/domain-services/flows/strategy/impl/utils.ts diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index c7fe5131..b356f4e6 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -1,34 +1,26 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; +import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; -import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { type Category } from './graphql/types'; @Service() export class CategoryService { async getCategoriesForFlows( - flowLinks: Map>>, + flowIDs: FlowId[], models: Database ): Promise> { - const flowLinksBrandedIds = []; - for (const flowLink of flowLinks.keys()) { - flowLinksBrandedIds.push(createBrandedValue(flowLink)); - } - // Group categories by flow ID for easy mapping const categoriesMap = new Map(); - if (flowLinksBrandedIds.length === 0) { - return categoriesMap; - } - const categoriesRef: Array> = await models.categoryRef.find({ where: { objectID: { - [Op.IN]: flowLinksBrandedIds, + [Op.IN]: flowIDs, }, + objectType: 'flow', }, }); @@ -89,4 +81,20 @@ export class CategoryService { }, }; } + + async findCategories(models: Database, where: any) { + const category = await models.category.find({ + where, + }); + + return category; + } + + async findCategoryRefs(models: Database, where: any) { + const categoryRef = await models.categoryRef.find({ + where, + }); + + return categoryRef; + } } diff --git a/src/domain-services/categories/model.ts b/src/domain-services/categories/model.ts new file mode 100644 index 00000000..a96d8f62 --- /dev/null +++ b/src/domain-services/categories/model.ts @@ -0,0 +1,27 @@ +export type CategoryGroup = + | 'beneficiaryGroup' + | 'contributionStatus' + | 'contributionType' + | 'customLocation' + | 'earmarkingType' + | 'emergencyType' + | 'flowStatus' + | 'flowType' + | 'genderMarker' + | 'inactiveReason' + | 'keywords' + | 'method' + | 'organizationLevel' + | 'organizationType' + | 'pendingStatus' + | 'planCosting' + | 'planIndicated' + | 'planType' + | 'projectGrouping1' + | 'projectGrouping2' + | 'projectPriority' + | 'regions' + | 'reportChannel' + | 'responseType' + | 'sectorIASC' + | 'subsetOfPlan'; diff --git 
a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index ca0ee5db..3a410d6d 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -18,6 +18,7 @@ import { ReportDetailService } from '../report-details/report-detail-service'; import { type UsageYear } from '../usage-years/grpahql/types'; import { UsageYearService } from '../usage-years/usage-year-service'; import { + type FlowCategoryFilters, type FlowObjectFilters, type SearchFlowsArgs, type SearchFlowsArgsNonPaginated, @@ -33,8 +34,8 @@ import { } from './graphql/types'; import { type FlowEntity } from './model'; import { type FlowSearchStrategy } from './strategy/flow-search-strategy'; -import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy'; -import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy'; +import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy-impl'; +import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy-impl'; @Service() export class FlowSearchService { @@ -56,7 +57,8 @@ export class FlowSearchService { models: Database, filters: SearchFlowsArgs ): Promise { - const { limit, afterCursor, beforeCursor, sortField, sortOrder } = filters; + const { limit, nextPageCursor, prevPageCursor, sortField, sortOrder } = + filters; const orderBy: | { column: FlowSortField; order: 'asc' | 'desc' } @@ -65,18 +67,19 @@ export class FlowSearchService { order: sortOrder ?? 'desc', }; - const { flowFilters, flowObjectFilters } = filters; + const { flowFilters, flowObjectFilters, flowCategoryFilters } = filters; const cursorCondition = this.buildCursorCondition( - beforeCursor, - afterCursor, + prevPageCursor, + nextPageCursor, orderBy ); // Determine strategy of how to search for flows const { strategy, conditions } = this.determineStrategy( flowFilters, - flowObjectFilters + flowObjectFilters, + flowCategoryFilters ); // Fetch one more item to check for hasNextPage @@ -137,7 +140,7 @@ export class FlowSearchService { usageYearsMap, reportDetailsMap, ] = await Promise.all([ - this.categoryService.getCategoriesForFlows(flowLinksMap, models), + this.categoryService.getCategoriesForFlows(flowIds, models), this.organizationService.getOrganizationsForFlows( organizationsFO, models @@ -210,9 +213,9 @@ export class FlowSearchService { return { flows: items, hasNextPage: limit <= flows.length, - hasPreviousPage: afterCursor !== undefined, - startCursor: flows.length ? items[0].cursor : '', - endCursor: flows.length ? items.at(-1)?.cursor ?? '' : '', + hasPreviousPage: nextPageCursor !== undefined, + prevPageCursor: flows.length ? items[0].cursor : '', + nextPageCursor: flows.length ? items.at(-1)?.cursor ?? '' : '', pageSize: flows.length, sortField: sortField ?? 'updatedAt', sortOrder: sortOrder ?? 
'desc', @@ -274,49 +277,62 @@ export class FlowSearchService { determineStrategy( flowFilters: SearchFlowsFilters, - flowObjectFilters: FlowObjectFilters[] + flowObjectFilters: FlowObjectFilters[], + flowCategoryFilters: FlowCategoryFilters ): { strategy: FlowSearchStrategy; conditions: any } { - let conditions = {}; - const isFlowFilterDefined = flowFilters !== undefined; const isFlowObjectFilterDefined = flowObjectFilters !== undefined; const isFlowObjectFiltersNotEmpty = isFlowObjectFilterDefined && flowObjectFilters.length !== 0; + const isFlowCategoryFilterDefined = flowCategoryFilters !== undefined; + if ( (!isFlowFilterDefined && - (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty)) || + (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty) && + !isFlowCategoryFilterDefined) || (isFlowFilterDefined && - (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty)) + (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty) && + !isFlowCategoryFilterDefined) ) { const flowConditions = this.prepareFlowConditions(flowFilters); - conditions = { ...conditions, ...flowConditions }; - return { strategy: this.onlyFlowFiltersStrategy, conditions }; - } else if (!isFlowFilterDefined && isFlowObjectFiltersNotEmpty) { + return { + strategy: this.onlyFlowFiltersStrategy, + conditions: flowConditions, + }; + } else if ( + !isFlowFilterDefined && + isFlowObjectFiltersNotEmpty && + isFlowCategoryFilterDefined + ) { const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); - conditions = { ...conditions, ...flowObjectConditions }; return { strategy: this.flowObjectFiltersStrategy, - conditions: this.buildConditionsMap(undefined, flowObjectConditions), + conditions: { + conditionsMap: this.buildConditionsMap({}, flowObjectConditions), + flowCategoryFilters, + }, }; - } else if (isFlowFilterDefined && isFlowObjectFiltersNotEmpty) { + } else if ( + isFlowFilterDefined && + isFlowObjectFiltersNotEmpty && + isFlowCategoryFilterDefined + ) { const flowConditions = this.prepareFlowConditions(flowFilters); const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); - conditions = { - ...conditions, - ...flowConditions, - ...flowObjectConditions, - }; return { strategy: this.flowObjectFiltersStrategy, - conditions: this.buildConditionsMap( - flowConditions, - flowObjectConditions - ), + conditions: { + conditionsMap: this.buildConditionsMap( + flowConditions, + flowObjectConditions + ), + flowCategoryFilters, + }, }; } @@ -469,14 +485,15 @@ export class FlowSearchService { parkedParentSource: FlowParkedParentSource[], sortColumn?: FlowSortField ): FlowPaged { - let cursor: string | number | Date = sortColumn - ? flow.id.valueOf() - : flow[sortColumn ?? 'updatedAt']; + let cursor = sortColumn ? 
flow[sortColumn] : flow.updatedAt; if (cursor instanceof Date) { cursor = cursor.toISOString(); } else if (typeof cursor === 'number') { cursor = cursor.toString(); + } else if (typeof cursor === 'boolean' || cursor === null) { + // cases such as 'boolean' + cursor = flow.id.toString(); } return { @@ -510,11 +527,12 @@ export class FlowSearchService { models: Database, args: SearchFlowsArgsNonPaginated ): Promise { - const { flowFilters, flowObjectFilters } = args; + const { flowFilters, flowObjectFilters, flowCategoryFilters } = args; const { strategy, conditions } = this.determineStrategy( flowFilters, - flowObjectFilters + flowObjectFilters, + flowCategoryFilters ); const { flows, count } = await strategy.search(conditions, models); @@ -544,8 +562,8 @@ export class FlowSearchService { let hasNextPage = flowSearchResponse.hasNextPage; - let cursor = flowSearchResponse.endCursor; - let nextArgs: SearchFlowsArgs = { ...args, afterCursor: cursor }; + let cursor = flowSearchResponse.nextPageCursor; + let nextArgs: SearchFlowsArgs = { ...args, nextPageCursor: cursor }; let nextFlowSearchResponse: FlowSearchResult; while (hasNextPage) { @@ -554,10 +572,10 @@ export class FlowSearchService { flows.push(...nextFlowSearchResponse.flows); hasNextPage = nextFlowSearchResponse.hasNextPage; - cursor = nextFlowSearchResponse.endCursor; + cursor = nextFlowSearchResponse.nextPageCursor; // Update the cursor for the next iteration - nextArgs = { ...args, afterCursor: cursor }; + nextArgs = { ...args, nextPageCursor: cursor }; } return { flows, flowsCount: flows.length }; diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index fab20a24..b58bfd69 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -29,6 +29,15 @@ export class SearchFlowsFilters { legacyId: number; } +@InputType() +export class FlowCategoryFilters { + @Field({ nullable: true }) + pending: boolean; + + @Field(() => [FlowCategory], { nullable: true }) + categoryFilters: FlowCategory[]; +} + @InputType() export class FlowObjectFilters { @Field({ nullable: false }) @@ -51,11 +60,14 @@ export class FlowObjectFilters { @InputType() export class FlowCategory { - @Field({ nullable: false }) + @Field({ nullable: true }) id: number; - @Field({ nullable: false }) + @Field({ nullable: true }) group: string; + + @Field({ nullable: true }) + name: string; } @ArgsType() @@ -66,11 +78,11 @@ export class SearchFlowsArgs extends PaginationArgs { @Field(() => [FlowObjectFilters], { nullable: true }) flowObjectFilters: FlowObjectFilters[]; - @Field(() => [FlowCategory], { nullable: true }) - categoryFilters: FlowCategory[]; - @Field({ nullable: true }) includeChildrenOfParkedFlows: boolean; + + @Field({ nullable: true }) + flowCategoryFilters: FlowCategoryFilters; } @ArgsType() @@ -81,9 +93,9 @@ export class SearchFlowsArgsNonPaginated { @Field(() => [FlowObjectFilters], { nullable: true }) flowObjectFilters: FlowObjectFilters[]; - @Field(() => [FlowCategory], { nullable: true }) - categoryFilters: FlowCategory[]; - @Field({ nullable: true }) includeChildrenOfParkedFlows: boolean; + + @Field({ nullable: true }) + flowCategoryFilters: FlowCategoryFilters; } diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index 2d2d4015..c2cb3a19 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -8,7 +8,9 
@@ export interface FlowSearchStrategyResponse { export interface FlowSearchStrategy { search( - flowConditions: Map, + flowConditions: + | Map + | { conditionsMap: Map; flowCategoryFilters: any }, models: Database, orderBy?: any, limit?: number, diff --git a/src/domain-services/flows/strategy/flowID-search-strategy.ts b/src/domain-services/flows/strategy/flowID-search-strategy.ts new file mode 100644 index 00000000..4a721325 --- /dev/null +++ b/src/domain-services/flows/strategy/flowID-search-strategy.ts @@ -0,0 +1,17 @@ +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { type FlowCategoryFilters } from '../graphql/args'; + +export interface FlowIdSearchStrategyResponse { + flowIDs: FlowId[]; +} + +export interface FlowIDSearchStrategy { + search( + models: Database, + flowObjectsConditions: Map>, + flowCategoryConditions: FlowCategoryFilters + ): Promise; + + generateWhereClause(flowIds: FlowId[], conditions: any): any; +} diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts new file mode 100644 index 00000000..2a458701 --- /dev/null +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts @@ -0,0 +1,134 @@ +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Service } from 'typedi'; +import { FlowService } from '../../flow-service'; +import { type FlowCategoryFilters } from '../../graphql/args'; +import { + type FlowSearchStrategy, + type FlowSearchStrategyResponse, +} from '../flow-search-strategy'; +import { + type FlowIDSearchStrategy, + type FlowIdSearchStrategyResponse, +} from '../flowID-search-strategy'; +import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; +import { GetFlowIdsFromMixedConditionsStrategyImpl } from './get-flowIds-flow-mixed-conditions-strategy-impl'; + +@Service() +export class FlowObjectFiltersStrategy implements FlowSearchStrategy { + constructor( + private readonly flowService: FlowService, + private readonly getFlowIdsFromObjectConditions: GetFlowIdsFromMixedConditionsStrategyImpl, + private readonly getFlowIdsFromCategoryConditions: GetFlowIdsFromCategoryConditionsStrategyImpl, + private readonly getFlowIdsFromMixedConditions: GetFlowIdsFromMixedConditionsStrategyImpl + ) {} + + async search( + flowConditions: { + conditionsMap: Map; + flowCategoryFilters: FlowCategoryFilters; + }, + models: Database, + orderBy?: any, + limit?: number, + cursorCondition?: any + ): Promise { + const flowConditionsMap = flowConditions.conditionsMap; + // Obtain flowObjects conditions + const flowObjectsConditions: Map< + string, + Map + > = flowConditionsMap.get('flowObjects') ?? new Map(); + + // Obtain flow conditions + const flowEntityConditions = flowConditionsMap.get('flow') ?? new Map(); + + // Obtain flowCategory conditions + const flowCategoryConditions = flowConditions.flowCategoryFilters ?? 
{}; + + const searchFlowIdsStrategy: FlowIDSearchStrategy = this.determineStrategy( + flowObjectsConditions, + flowCategoryConditions + ); + + const { flowIDs: flowIdsToFilter }: FlowIdSearchStrategyResponse = + await searchFlowIdsStrategy.search( + models, + flowObjectsConditions, + flowCategoryConditions + ); + + // Combine conditions from flowObjects FlowIDs and flow conditions + const countConditions = { + [Cond.AND]: [ + flowEntityConditions ?? {}, + + searchFlowIdsStrategy.generateWhereClause( + flowIdsToFilter, + flowCategoryConditions + ), + ], + }; + + // Combine cursor condition with flow conditions + const searchConditions = { + [Cond.AND]: [ + flowEntityConditions ?? {}, + cursorCondition ?? {}, + searchFlowIdsStrategy.generateWhereClause( + flowIdsToFilter, + flowCategoryConditions + ), + ], + }; + + // Obtain flows and flowCount based on flowIDs from filtered flowObjects + // and flow conditions + const [flows, countRes] = await Promise.all([ + this.flowService.getFlows(models, searchConditions, orderBy, limit), + this.flowService.getFlowsCount(models, countConditions), + ]); + + // Map count result query to count object + const countObject = countRes[0] as { count: number }; + + return { flows, count: countObject.count }; + } + + // Determine the strategy to use in order to obtain flowIDs + // aiming to have the least amount of flowIDs to filter + // in the next step + // If there are flowObjects conditions + // use flowObjects strategy + // otherwise use flowCategories strategy + // If there are both flowObjects and flowCategories conditions + // use both and merge the results keeping only flowIDs + // present in both arrays + // otherwise keep all flowIDs from the one that is not empty + determineStrategy( + flowObjectsConditions: Map>, + flowCategoryConditions: any + ): any { + const isFlowObjectsConditionsIsDefined = + flowObjectsConditions !== undefined; + const isFlowCategoryConditionsIsDefined = + flowCategoryConditions !== undefined; + + const flowObjectsConditionsIsNotEmpty = + isFlowObjectsConditionsIsDefined && flowObjectsConditions.size; + const flowCategoryConditionsIsNotEmpty = + isFlowCategoryConditionsIsDefined && + Object.keys(flowCategoryConditions).length; + + if (flowObjectsConditionsIsNotEmpty && flowCategoryConditionsIsNotEmpty) { + return this.getFlowIdsFromMixedConditions; + } else if (flowObjectsConditionsIsNotEmpty) { + return this.getFlowIdsFromObjectConditions; + } else if (flowCategoryConditionsIsNotEmpty) { + return this.getFlowIdsFromCategoryConditions; + } + throw new Error( + 'No strategy found for flowObjectsConditions and flowCategoryConditions' + ); + } +} diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts deleted file mode 100644 index 51d16337..00000000 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { type Database } from '@unocha/hpc-api-core/src/db'; -import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { Service } from 'typedi'; -import { FlowObjectService } from '../../../flow-object/flow-object-service'; -import { FlowService } from '../../flow-service'; -import { - type FlowSearchStrategy, - type FlowSearchStrategyResponse, -} from '../flow-search-strategy'; - -@Service() -export class FlowObjectFiltersStrategy implements FlowSearchStrategy { - 
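The strategy selection above reduces to one rule: use the mixed strategy only when both flow-object and category conditions are present, otherwise fall back to whichever single-source strategy applies, and fail loudly when neither is given. A standalone sketch of that rule, where the returned strategy names are placeholders for the injected implementations:

// Sketch of the flowID-strategy selection: prefer the narrowest lookup so
// the final `id IN (...)` filter stays as small as possible.
type FlowIdStrategy = 'objects' | 'categories' | 'mixed';

function pickFlowIdStrategy(
  objectConditions: Map<string, unknown>,
  categoryConditions: Record<string, unknown>
): FlowIdStrategy {
  const hasObjects = objectConditions !== undefined && objectConditions.size > 0;
  const hasCategories =
    categoryConditions !== undefined &&
    Object.keys(categoryConditions).length > 0;

  if (hasObjects && hasCategories) {
    return 'mixed';
  }
  if (hasObjects) {
    return 'objects';
  }
  if (hasCategories) {
    return 'categories';
  }
  throw new Error('No strategy found for the given conditions');
}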
constructor( - private readonly flowService: FlowService, - private readonly flowObjectService: FlowObjectService - ) {} - - async search( - flowConditions: Map, - models: Database, - orderBy?: any, - limit?: number, - cursorCondition?: any - ): Promise { - // Obtain flowObjects conditions - const flowObjectsConditions: Map< - string, - Map - > = flowConditions.get('flowObjects') ?? new Map(); - - // Obtain flow conditions - const flowEntityConditions = flowConditions.get('flow') ?? new Map(); - - // Obtain where clause for flowObjects - const flowObjectWhere = this.mapFlowObjectConditionsToWhereClause( - flowObjectsConditions - ); - - // Obtain flowIDs based on provided flowObject conditions - const flowIDsFromFilteredFlowObjects: FlowId[] = - await this.getFlowIDsFromFilteredFlowObjects(models, flowObjectWhere); - - // Combine conditions from flowObjects FlowIDs and flow conditions - const countConditions = { - [Cond.AND]: [ - flowEntityConditions ?? {}, - { - id: { - [Op.IN]: flowIDsFromFilteredFlowObjects, - }, - }, - ], - }; - - // Combine cursor condition with flow conditions - const searchConditions = { - [Cond.AND]: [ - flowEntityConditions ?? {}, - cursorCondition ?? {}, - { - id: { - [Op.IN]: flowIDsFromFilteredFlowObjects, - }, - }, - ], - }; - - // Obtain flows and flowCount based on flowIDs from filtered flowObjects - // and flow conditions - const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, searchConditions, orderBy, limit), - this.flowService.getFlowsCount(models, countConditions), - ]); - - // Map count result query to count object - const countObject = countRes[0] as { count: number }; - - return { flows, count: countObject.count }; - } - - private async getFlowIDsFromFilteredFlowObjects( - models: Database, - flowObjectWhere: any[] - ): Promise { - const flowIDsFromFilteredFlowObjects: FlowId[] = []; - const tempFlowIDs: FlowId[][] = await Promise.all( - flowObjectWhere.map((whereClause) => - this.flowObjectService.getFlowIdsFromFlowObjects(models, whereClause) - ) - ); - // Flatten array of arrays keeping only values present in all arrays - const flowIDs = tempFlowIDs.reduce((a, b) => - a.filter((c) => b.includes(c)) - ); - flowIDsFromFilteredFlowObjects.push(...flowIDs); - - return flowIDsFromFilteredFlowObjects.sort(); - } - - /* - * Map structure: - * { - * KEY = objectType: string, - * VALUE = { - * KEY = refDirection: string, - * VALUE = [objectID: number] - * } - * } - */ - private mapFlowObjectConditionsToWhereClause( - flowObjectConditions: Map> - ): any[] { - const whereClauses: any = []; - for (const [objectType, refDirectionMap] of flowObjectConditions) { - for (const [refDirection, objectIDs] of refDirectionMap) { - const whereClause = { - objectID: { - [Op.IN]: objectIDs, - }, - refDirection: { - [Op.LIKE]: refDirection, - }, - objectType: { - [Op.LIKE]: objectType, - }, - }; - - whereClauses.push(whereClause); - } - } - - return whereClauses; - } -} diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts new file mode 100644 index 00000000..1225e431 --- /dev/null +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -0,0 +1,67 @@ +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type CategoryId } from '@unocha/hpc-api-core/src/db/models/category'; +import { type FlowId } from 
'@unocha/hpc-api-core/src/db/models/flow'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { Service } from 'typedi'; +import { CategoryService } from '../../../categories/category-service'; +import { type FlowCategoryFilters } from '../../graphql/args'; +import { + type FlowIDSearchStrategy, + type FlowIdSearchStrategyResponse, +} from '../flowID-search-strategy'; +import { mapFlowCategoryConditionsToWhereClause } from './utils'; + +@Service() +export class GetFlowIdsFromCategoryConditionsStrategyImpl + implements FlowIDSearchStrategy +{ + constructor(private readonly categoryService: CategoryService) {} + + async search( + models: Database, + flowObjectsConditions: Map>, + flowCategoryConditions: FlowCategoryFilters + ): Promise { + const whereClause = mapFlowCategoryConditionsToWhereClause( + flowCategoryConditions + ); + + const categories = await this.categoryService.findCategories( + models, + whereClause + ); + + const categoriesIds: CategoryId[] = categories.map( + (category) => category.id + ); + + const categoryRefs = await this.categoryService.findCategoryRefs(models, { + categoryID: { + [Op.IN]: categoriesIds, + }, + objectType: 'flow', + }); + + // Map category refs to flow IDs + // keep only unique values + // and return the list of flow IDs + const flowIds = [ + ...new Set(categoryRefs.map((categoryRef) => categoryRef.objectID)), + ].map((flowId) => createBrandedValue(flowId)); + + return { flowIDs: flowIds }; + } + + generateWhereClause( + flowIds: FlowId[], + flowCategoryConditions: FlowCategoryFilters + ) { + const operation = flowCategoryConditions.pending ? Op.IN : Op.NOT_IN; + return { + id: { + [operation]: flowIds, + }, + }; + } +} diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts new file mode 100644 index 00000000..99ff6626 --- /dev/null +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts @@ -0,0 +1,57 @@ +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Service } from 'typedi'; +import { type FlowCategoryFilters } from '../../graphql/args'; +import { + type FlowIDSearchStrategy, + type FlowIdSearchStrategyResponse, +} from '../flowID-search-strategy'; +import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; +import { GetFlowIdsFromObjectConditionsStrategyImpl } from './get-flowIds-flow-object-conditions-strategy-impl'; +import { mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories } from './utils'; + +@Service() +export class GetFlowIdsFromMixedConditionsStrategyImpl + implements FlowIDSearchStrategy +{ + constructor( + private readonly getFlowIdsFromObjectConditionsStrategy: GetFlowIdsFromObjectConditionsStrategyImpl, + private readonly getFlowIdsFromCategoryConditionsStrategy: GetFlowIdsFromCategoryConditionsStrategyImpl + ) {} + + async search( + models: Database, + flowObjectsConditions: Map>, + flowCategoryConditions: FlowCategoryFilters + ): Promise { + const { flowIDs: flowIdsFromFlowObjects }: FlowIdSearchStrategyResponse = + await this.getFlowIdsFromObjectConditionsStrategy.search( + models, + flowObjectsConditions + ); + + const { flowIDs: 
flowIdsFromFlowCategories }: FlowIdSearchStrategyResponse = + await this.getFlowIdsFromCategoryConditionsStrategy.search( + models, + flowObjectsConditions, + flowCategoryConditions + ); + + const mergeFlowIDs: FlowId[] = + mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( + flowIdsFromFlowObjects, + flowIdsFromFlowCategories + ); + + return { flowIDs: mergeFlowIDs }; + } + + generateWhereClause(flowIds: FlowId[]) { + return { + id: { + [Op.IN]: flowIds, + }, + }; + } +} diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts new file mode 100644 index 00000000..974436f5 --- /dev/null +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts @@ -0,0 +1,47 @@ +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Service } from 'typedi'; +import { FlowObjectService } from '../../../flow-object/flow-object-service'; +import { + type FlowIDSearchStrategy, + type FlowIdSearchStrategyResponse, +} from '../flowID-search-strategy'; +import { mapFlowObjectConditionsToWhereClause } from './utils'; + +@Service() +export class GetFlowIdsFromObjectConditionsStrategyImpl + implements FlowIDSearchStrategy +{ + constructor(private readonly flowObjectService: FlowObjectService) {} + + async search( + models: Database, + flowObjectsConditions: Map> + ): Promise { + const flowObjectWhere = mapFlowObjectConditionsToWhereClause( + flowObjectsConditions + ); + + const flowIDsFromFilteredFlowObjects: FlowId[] = []; + const tempFlowIDs: FlowId[][] = await Promise.all( + flowObjectWhere.map((whereClause) => + this.flowObjectService.getFlowIdsFromFlowObjects(models, whereClause) + ) + ); + + // Flatten array of arrays keeping only values present in all arrays + const flowIDs = tempFlowIDs.flat(); + flowIDsFromFilteredFlowObjects.push(...new Set(flowIDs)); + + return { flowIDs: flowIDsFromFilteredFlowObjects }; + } + + generateWhereClause(flowIds: FlowId[]) { + return { + id: { + [Op.IN]: flowIds, + }, + }; + } +} diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts similarity index 100% rename from src/domain-services/flows/strategy/impl/only-flow-conditions-strategy.ts rename to src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts new file mode 100644 index 00000000..16fa0e84 --- /dev/null +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -0,0 +1,144 @@ +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { type FlowCategoryFilters } from '../../graphql/args'; + +/* + * Map structure: + * { + * KEY = objectType: string, + * VALUE = { + * KEY = refDirection: string, + * VALUE = [objectID: number] + * } + * } + */ +export function mapFlowObjectConditionsToWhereClause( + flowObjectConditions: Map> +): any[] { + const whereClauses: any = []; + for (const [objectType, refDirectionMap] of flowObjectConditions) { + for (const [refDirection, objectIDs] of refDirectionMap) { + const whereClause = { + objectID: { + [Op.IN]: objectIDs, 
+ }, + refDirection: { + [Op.LIKE]: refDirection, + }, + objectType: { + [Op.LIKE]: objectType, + }, + }; + + whereClauses.push(whereClause); + } + } + + return whereClauses; +} + +export function mapFlowCategoryConditionsToWhereClause( + flowCategoryConditions: FlowCategoryFilters +) { + let whereClause = {}; + + if (flowCategoryConditions.pending !== undefined) { + whereClause = { + group: 'inactiveReason', + name: 'Pending review', + }; + } + + if (flowCategoryConditions.categoryFilters?.length > 0) { + // Map category filters + // getting Id when possible + // or name and group otherwise + const categoryIdFilters: number[] = []; + const categoryFilters = new Map(); + for (const categoryFilter of flowCategoryConditions.categoryFilters) { + if (categoryFilter.id) { + categoryIdFilters.push(categoryFilter.id); + } else if (categoryFilter.group && categoryFilter.name) { + const group = categoryFilter.group; + const name = categoryFilter.name; + + const groupsNamesFilter = + (categoryFilters.get(group) as string[]) || []; + + groupsNamesFilter.push(name); + categoryFilters.set(group, groupsNamesFilter); + } + } + + if (categoryIdFilters.length > 0) { + whereClause = { + ...whereClause, + id: { + [Op.IN]: categoryIdFilters, + }, + }; + } + + // for each entry of the group name + // add a condition to the where clause + // with the names associated to the group + // both in the same AND clause + for (const [group, names] of categoryFilters) { + whereClause = { + ...whereClause, + [Cond.AND]: [ + { + group: { + [Op.LIKE]: group, + }, + name: { + [Op.IN]: names, + }, + }, + ], + }; + } + } + + return whereClause; +} + +export function mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( + flowIDsFromFilteredFlowObjects: FlowId[], + flowIDsFromFilteredFlowCategories: FlowId[] +): FlowId[] { + const isFlowIDsFromFilteredFlowCategoriesIsEmpty = + !flowIDsFromFilteredFlowCategories?.length; + const isFlowIDsFromFilteredFlowObjectsIsEmpty = + !flowIDsFromFilteredFlowObjects?.length; + + if ( + isFlowIDsFromFilteredFlowCategoriesIsEmpty && + isFlowIDsFromFilteredFlowObjectsIsEmpty + ) { + return []; + } + + if ( + isFlowIDsFromFilteredFlowCategoriesIsEmpty && + !isFlowIDsFromFilteredFlowObjectsIsEmpty + ) { + return flowIDsFromFilteredFlowObjects; + } + + if ( + !isFlowIDsFromFilteredFlowCategoriesIsEmpty && + isFlowIDsFromFilteredFlowObjectsIsEmpty + ) { + return flowIDsFromFilteredFlowCategories; + } + + return flowIDsFromFilteredFlowObjects.length > + flowIDsFromFilteredFlowCategories.length + ? 
flowIDsFromFilteredFlowCategories.filter((flowID) => + flowIDsFromFilteredFlowObjects.includes(flowID) + ) + : flowIDsFromFilteredFlowObjects.filter((flowID) => + flowIDsFromFilteredFlowCategories.includes(flowID) + ); +} diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts index b0fc2d54..797d8a85 100644 --- a/src/utils/graphql/pagination.ts +++ b/src/utils/graphql/pagination.ts @@ -17,10 +17,10 @@ export class PageInfo { hasPreviousPage: boolean; @Field({ nullable: false }) - startCursor: string; + prevPageCursor: string; @Field({ nullable: false }) - endCursor: string; + nextPageCursor: string; @Field({ nullable: false }) pageSize: number; @@ -71,10 +71,10 @@ export class PaginationArgs { limit: number; @Field({ nullable: true }) - afterCursor: string; + nextPageCursor: string; @Field({ nullable: true }) - beforeCursor: string; + prevPageCursor: string; @Field(() => String, { nullable: true }) sortField: TSortFields; From 9673b34a1b83397469d21763d90a1b222b893dbb Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 11 Dec 2023 18:56:37 +0100 Subject: [PATCH 38/67] Temp2 --- .../flows/flow-search-service.ts | 165 ++++++++++-------- src/domain-services/flows/graphql/resolver.ts | 4 +- src/domain-services/flows/graphql/types.ts | 56 +++--- src/domain-services/flows/model.ts | 8 +- ...-flow-category-conditions-strategy-impl.ts | 2 +- .../organizations/organization-service.ts | 4 +- src/utils/graphql/pagination.ts | 2 +- 7 files changed, 132 insertions(+), 109 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 3a410d6d..a15aacc6 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,7 +1,8 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; -import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; +import { type SortOrder } from '../../utils/graphql/pagination'; import { CategoryService } from '../categories/category-service'; import { type Category } from '../categories/graphql/types'; import { ExternalReferenceService } from '../external-reference/external-reference-service'; @@ -25,14 +26,14 @@ import { type SearchFlowsFilters, } from './graphql/args'; import { - type FlowPaged, + type Flow, type FlowParkedParentSource, type FlowSearchResult, type FlowSearchResultNonPaginated, type FlowSearchTotalAmountResult, type FlowSortField, } from './graphql/types'; -import { type FlowEntity } from './model'; +import { type FlowEntity, type FlowOrderBy } from './model'; import { type FlowSearchStrategy } from './strategy/flow-search-strategy'; import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy-impl'; import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy-impl'; @@ -60,12 +61,7 @@ export class FlowSearchService { const { limit, nextPageCursor, prevPageCursor, sortField, sortOrder } = filters; - const orderBy: - | { column: FlowSortField; order: 'asc' | 'desc' } - | Array<{ column: FlowSortField; order: 'asc' | 'desc' }> = { - column: sortField ?? 'updatedAt', - order: sortOrder ?? 
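The merge helper above intersects the two flowID lists, falling back to whichever list is non-empty when the other produced nothing. A Set-based sketch of the same behaviour, shown here as a standalone helper that avoids the repeated includes() scans of the filter-based version:

// Intersection of the flowIDs produced by the flow-object filter and the
// category filter; when one side produced nothing, the other side wins.
function intersectFlowIds(fromObjects: number[], fromCategories: number[]): number[] {
  if (fromObjects.length === 0) {
    return fromCategories;
  }
  if (fromCategories.length === 0) {
    return fromObjects;
  }

  const lookup = new Set(fromCategories);
  return fromObjects.filter((id) => lookup.has(id));
}

console.log(intersectFlowIds([1, 2, 3, 5], [2, 5, 8])); // [2, 5]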
'desc', - }; + const orderBy: FlowOrderBy = this.buildOrderBy(sortField, sortOrder); const { flowFilters, flowObjectFilters, flowCategoryFilters } = filters; @@ -79,7 +75,8 @@ export class FlowSearchService { const { strategy, conditions } = this.determineStrategy( flowFilters, flowObjectFilters, - flowCategoryFilters + flowCategoryFilters, + orderBy ); // Fetch one more item to check for hasNextPage @@ -204,25 +201,86 @@ export class FlowSearchService { parentIDs, externalReferences, reportDetailsWithChannel, - parkedParentSource, - sortField + parkedParentSource ); }) ); + // Sort items + // FIXME: this sorts the page, not the whole result set + items.sort((a: Flow, b: Flow) => { + const nestedA = a[orderBy.entity as keyof Flow]; + const nestedB = b[orderBy.entity as keyof Flow]; + + if (nestedA && nestedB) { + const propertyA = nestedA[orderBy.column as keyof typeof nestedA]; + const propertyB = nestedB[orderBy.column as keyof typeof nestedB]; + + // Implement your custom comparison logic + // For example, compare strings or numbers + if (propertyA < propertyB) { + return orderBy.order === 'asc' ? -1 : 1; + } + if (propertyA > propertyB) { + return orderBy.order === 'asc' ? 1 : -1; + } + } + + return 0; + }); + + const isOrderByForFlows = orderBy.entity === 'flow'; + const firstItem = items[0]; + const prevPageCursorEntity = isOrderByForFlows + ? firstItem + : firstItem[orderBy.entity as keyof typeof firstItem]; + const prevPageCursorValue = prevPageCursorEntity + ? prevPageCursorEntity[ + orderBy.column as keyof typeof prevPageCursorEntity + ] ?? '' + : ''; + + const lastItem = items.at(-1); + const nextPageCursorEntity = isOrderByForFlows + ? lastItem + : lastItem![orderBy.entity as keyof typeof lastItem]; + const nextPageCursorValue = nextPageCursorEntity + ? nextPageCursorEntity[ + orderBy.column as keyof typeof nextPageCursorEntity + ]?.toString() ?? '' + : ''; + + // TODO: implement nested cursors for page return { flows: items, hasNextPage: limit <= flows.length, hasPreviousPage: nextPageCursor !== undefined, - prevPageCursor: flows.length ? items[0].cursor : '', - nextPageCursor: flows.length ? items.at(-1)?.cursor ?? '' : '', + prevPageCursor: prevPageCursorValue, + nextPageCursor: nextPageCursorValue, pageSize: flows.length, - sortField: sortField ?? 'updatedAt', + sortField: `${orderBy.entity}.${orderBy.column}` as FlowSortField, sortOrder: sortOrder ?? 'desc', total: count, }; } + buildOrderBy(sortField?: FlowSortField, sortOrder?: SortOrder) { + const orderBy: FlowOrderBy = { + column: sortField ?? 'updatedAt', + order: sortOrder ?? 
('desc' as SortOrder), + entity: 'flow', + }; + + // Check if sortField is a nested property + if (orderBy.column.includes('.')) { + const [nestedEntity, propertyToSort] = orderBy.column.split('.'); + // Update orderBy object with nested information + orderBy.column = propertyToSort; + orderBy.entity = nestedEntity; + } + + return orderBy; + } prepareFlowConditions(flowFilters: SearchFlowsFilters): any { let flowConditions = {}; @@ -242,7 +300,7 @@ export class FlowSearchService { } prepareFlowObjectConditions( - flowObjectFilters: FlowObjectFilters[] + flowObjectFilters: FlowObjectFilters[] = [] ): Map> { const flowObjectsConditions: Map> = new Map< string, @@ -278,7 +336,8 @@ export class FlowSearchService { determineStrategy( flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], - flowCategoryFilters: FlowCategoryFilters + flowCategoryFilters: FlowCategoryFilters, + orderBy?: FlowOrderBy ): { strategy: FlowSearchStrategy; conditions: any } { const isFlowFilterDefined = flowFilters !== undefined; const isFlowObjectFilterDefined = flowObjectFilters !== undefined; @@ -287,6 +346,8 @@ export class FlowSearchService { const isFlowCategoryFilterDefined = flowCategoryFilters !== undefined; + const isOrderByForFlows = orderBy?.entity === 'flow'; + if ( (!isFlowFilterDefined && (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty) && @@ -296,30 +357,20 @@ export class FlowSearchService { !isFlowCategoryFilterDefined) ) { const flowConditions = this.prepareFlowConditions(flowFilters); + if (!isOrderByForFlows) { + return { + strategy: this.flowObjectFiltersStrategy, + conditions: { + conditionsMap: this.buildConditionsMap(flowConditions, {}), + flowCategoryFilters, + }, + }; + } return { strategy: this.onlyFlowFiltersStrategy, conditions: flowConditions, }; - } else if ( - !isFlowFilterDefined && - isFlowObjectFiltersNotEmpty && - isFlowCategoryFilterDefined - ) { - const flowObjectConditions = - this.prepareFlowObjectConditions(flowObjectFilters); - - return { - strategy: this.flowObjectFiltersStrategy, - conditions: { - conditionsMap: this.buildConditionsMap({}, flowObjectConditions), - flowCategoryFilters, - }, - }; - } else if ( - isFlowFilterDefined && - isFlowObjectFiltersNotEmpty && - isFlowCategoryFilterDefined - ) { + } else if (isFlowObjectFiltersNotEmpty || isFlowCategoryFilterDefined) { const flowConditions = this.prepareFlowConditions(flowFilters); const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); @@ -412,9 +463,7 @@ export class FlowSearchService { private buildCursorCondition( beforeCursor: string, afterCursor: string, - orderBy: - | { column: FlowSortField; order: 'asc' | 'desc' } - | Array<{ column: FlowSortField; order: 'asc' | 'desc' }> + orderBy: FlowOrderBy ) { if (beforeCursor && afterCursor) { throw new Error('Cannot use before and after cursor at the same time'); @@ -424,20 +473,6 @@ export class FlowSearchService { return {}; } - if (Array.isArray(orderBy)) { - // Build iterations of cursor conditions - const cursorConditions = orderBy.map((orderBy) => { - return this.buildCursorConditionForSingleOrderBy( - beforeCursor, - afterCursor, - orderBy - ); - }); - - // Combine cursor conditions - return { [Cond.AND]: cursorConditions }; - } - return this.buildCursorConditionForSingleOrderBy( beforeCursor, afterCursor, @@ -448,7 +483,7 @@ export class FlowSearchService { private buildCursorConditionForSingleOrderBy( beforeCursor: string, afterCursor: string, - orderBy: { column: FlowSortField; order: 'asc' | 'desc' } + 
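buildOrderBy now accepts entity-qualified sort fields such as 'flow.updatedAt' and splits them into the entity and the column, defaulting to the flow entity. A self-contained sketch of that parsing, assuming the same defaults used elsewhere in the service:

// Sketch of the entity-qualified sort parsing: 'organizations.name' sorts a
// nested entity, anything without a dot is treated as a flow column.
type SortOrder = 'asc' | 'desc';

interface OrderBy {
  entity: string;
  column: string;
  order: SortOrder;
}

function buildOrderBy(sortField = 'updatedAt', sortOrder: SortOrder = 'desc'): OrderBy {
  const orderBy: OrderBy = { entity: 'flow', column: sortField, order: sortOrder };

  if (orderBy.column.includes('.')) {
    const [entity, column] = orderBy.column.split('.');
    orderBy.entity = entity;
    orderBy.column = column;
  }

  return orderBy;
}

console.log(buildOrderBy('organizations.name', 'asc'));
// { entity: 'organizations', column: 'name', order: 'asc' }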
orderBy: FlowOrderBy ) { let cursorCondition; @@ -482,20 +517,8 @@ export class FlowSearchService { parentIDs: number[], externalReferences: any[], reportDetails: any[], - parkedParentSource: FlowParkedParentSource[], - sortColumn?: FlowSortField - ): FlowPaged { - let cursor = sortColumn ? flow[sortColumn] : flow.updatedAt; - - if (cursor instanceof Date) { - cursor = cursor.toISOString(); - } else if (typeof cursor === 'number') { - cursor = cursor.toString(); - } else if (typeof cursor === 'boolean' || cursor === null) { - // cases such as 'boolean' - cursor = flow.id.toString(); - } - + parkedParentSource: FlowParkedParentSource[] + ): Flow { return { // Mandatory fields id: flow.id.valueOf(), @@ -518,8 +541,6 @@ export class FlowSearchService { externalReferences, reportDetails, parkedParentSource, - // Paged item field - cursor, }; } @@ -558,7 +579,7 @@ export class FlowSearchService { ): Promise { const flowSearchResponse = await this.search(models, args); - const flows: FlowPaged[] = flowSearchResponse.flows; + const flows: Flow[] = flowSearchResponse.flows; let hasNextPage = flowSearchResponse.hasNextPage; diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 1e149518..241bb988 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -4,14 +4,14 @@ import Context from '../../Context'; import { FlowSearchService } from '../flow-search-service'; import { SearchFlowsArgs, SearchFlowsArgsNonPaginated } from './args'; import { - FlowPaged, + Flow, FlowSearchResult, FlowSearchResultNonPaginated, FlowSearchTotalAmountResult, } from './types'; @Service() -@Resolver(FlowPaged) +@Resolver(Flow) export default class FlowResolver { constructor(private flowSearchService: FlowSearchService) {} diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 7371ce73..30964bba 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -1,6 +1,6 @@ import { Field, ObjectType } from 'type-graphql'; import { BaseType } from '../../../utils/graphql/base-types'; -import { PageInfo, type IItemPaged } from '../../../utils/graphql/pagination'; +import { PageInfo } from '../../../utils/graphql/pagination'; import { Category } from '../../categories/graphql/types'; import { BaseLocation } from '../../location/graphql/types'; import { Organization } from '../../organizations/graphql/types'; @@ -98,22 +98,16 @@ export class Flow extends BaseFlow { parkedParentSource: FlowParkedParentSource[]; } -@ObjectType() -export class FlowPaged extends Flow implements IItemPaged { - @Field(() => String, { nullable: false }) - cursor: string; -} - @ObjectType() export class FlowSearchResult extends PageInfo { - @Field(() => [FlowPaged], { nullable: false }) - flows: FlowPaged[]; + @Field(() => [Flow], { nullable: false }) + flows: Flow[]; } @ObjectType() export class FlowSearchResultNonPaginated { - @Field(() => [FlowPaged], { nullable: false }) - flows: FlowPaged[]; + @Field(() => [Flow], { nullable: false }) + flows: Flow[]; @Field(() => Number, { nullable: false }) flowsCount: number; @@ -129,23 +123,23 @@ export class FlowSearchTotalAmountResult { } export type FlowSortField = - | 'id' - | 'versionID' - | 'amountUSD' - | 'updatedAt' - | 'activeStatus' - | 'restricted' - | 'newMoney' - | 'flowDate' - | 'decisionDate' - | 'firstReportedDate' - | 'budgetYear' - | 'origAmount' - | 'origCurrency' - | 'exchangeRate' - 
| 'description' - | 'notes' - | 'versionStartDate' - | 'versionEndDate' - | 'createdAt' - | 'deletedAt'; + | 'flow.id' + | 'flow.versionID' + | 'flow.amountUSD' + | 'flow.updatedAt' + | 'flow.activeStatus' + | 'flow.restricted' + | 'flow.newMoney' + | 'flow.flowDate' + | 'flow.decisionDate' + | 'flow.firstReportedDate' + | 'flow.budgetYear' + | 'flow.origAmount' + | 'flow.origCurrency' + | 'flow.exchangeRate' + | 'flow.description' + | 'flow.notes' + | 'flow.versionStartDate' + | 'flow.versionEndDate' + | 'flow.createdAt' + | 'flow.deletedAt'; diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index 3c425a7e..4a1eb492 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -1,4 +1,10 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; - +import { type SortOrder } from '../../utils/graphql/pagination'; export type FlowEntity = InstanceDataOfModel; + +export type FlowOrderBy = { + column: string; + order: SortOrder; + entity: string; +}; diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts index 1225e431..3e95bc83 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -20,7 +20,7 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl async search( models: Database, - flowObjectsConditions: Map>, + _flowObjectsConditions: Map>, flowCategoryConditions: FlowCategoryFilters ): Promise { const whereClause = mapFlowCategoryConditionsToWhereClause( diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index 8a788578..f235a918 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -30,7 +30,9 @@ export class OrganizationService { const organizationPerFlow = organizationsMap.get(flowId)!; if ( !organizationPerFlow.some( - (org) => org.id === organization.id.valueOf() + (org) => + org.id === organization.id.valueOf() && + org.direction === orgFO.refDirection ) ) { const organizationMapped: Organization = diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts index 797d8a85..4d77af65 100644 --- a/src/utils/graphql/pagination.ts +++ b/src/utils/graphql/pagination.ts @@ -36,7 +36,7 @@ export class PageInfo { } export function prepareConditionFromCursor( - sortCondition: { column: string; order: 'asc' | 'desc' }, + sortCondition: { column: string; order: SortOrder }, afterCursor?: number, beforeCursor?: number ): any { From f8dadc82eca736d352f8720bde9613d9427e9c9d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 14 Dec 2023 08:23:00 +0100 Subject: [PATCH 39/67] Allow multiple reportingDetails per flow --- src/domain-services/categories/category-service.ts | 2 +- src/domain-services/flows/flow-search-service.ts | 1 - .../report-details/report-detail-service.ts | 13 +++++-------- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index b356f4e6..c86d6b09 100644 --- a/src/domain-services/categories/category-service.ts +++ 
b/src/domain-services/categories/category-service.ts @@ -1,5 +1,5 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; -import { FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Service } from 'typedi'; diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index a15aacc6..43393f72 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -250,7 +250,6 @@ export class FlowSearchService { ]?.toString() ?? '' : ''; - // TODO: implement nested cursors for page return { flows: items, hasNextPage: limit <= flows.length, diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index 6de2df9d..4c8c0fa5 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -27,18 +27,15 @@ export class ReportDetailService { if (!reportDetailsMap.has(flowId)) { reportDetailsMap.set(flowId, []); } - const reportDetail = reportDetails.find( - (report) => report && flowId === report?.flowID + + const flowsReportingDetails = reportDetails.filter( + (report) => report.flowID === flowId ); - if (reportDetail) { + if (flowsReportingDetails && flowsReportingDetails.length > 0) { const reportDetailsPerFlow = reportDetailsMap.get(flowId)!; - if ( - !reportDetailsPerFlow.some( - (report) => report.id === reportDetail.id.valueOf() - ) - ) { + for (const reportDetail of flowsReportingDetails) { const reportDetailMapped = this.mapReportDetailsToFlowReportDetail(reportDetail); reportDetailsPerFlow.push(reportDetailMapped); From 2301d01cefe71d53f263c6d785f4a1a56f6e2c41 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 14 Dec 2023 08:39:00 +0100 Subject: [PATCH 40/67] Allow ParentID from category to be null --- src/domain-services/categories/category-service.ts | 2 +- src/domain-services/categories/graphql/types.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index c86d6b09..21e5a125 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -68,7 +68,7 @@ export class CategoryService { createdAt: category.createdAt.toISOString(), updatedAt: category.updatedAt.toISOString(), description: category.description ?? '', - parentID: category.parentID ? category.parentID.valueOf() : 0, + parentID: category.parentID ? category.parentID.valueOf() : null, code: category.code ?? '', includeTotals: category.includeTotals ?? 
false, categoryRef: { diff --git a/src/domain-services/categories/graphql/types.ts b/src/domain-services/categories/graphql/types.ts index 5cf55816..2dc367b9 100644 --- a/src/domain-services/categories/graphql/types.ts +++ b/src/domain-services/categories/graphql/types.ts @@ -30,8 +30,8 @@ export class Category extends BaseType { @Field({ nullable: true }) description: string; - @Field({ nullable: true }) - parentID: number; + @Field(() => Number, { nullable: true }) + parentID: number | null; @Field({ nullable: true }) code: string; From 812d9f6dc73c13a21e7d1ec169c3674f36515a25 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 14 Dec 2023 17:50:00 +0100 Subject: [PATCH 41/67] Correct mapping of flow with version and its categories --- .../categories/category-service.ts | 40 ++++++++++++++----- .../categories/graphql/types.ts | 3 ++ .../flows/flow-search-service.ts | 22 ++++++++-- 3 files changed, 52 insertions(+), 13 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index 21e5a125..293116f1 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -8,11 +8,20 @@ import { type Category } from './graphql/types'; @Service() export class CategoryService { async getCategoriesForFlows( - flowIDs: FlowId[], + flowWithVersion: Map, models: Database - ): Promise> { - // Group categories by flow ID for easy mapping - const categoriesMap = new Map(); + ): Promise>> { + // Group of flowIDs and its versions + // Structure: + // flowID: { + // versionID: [categories] + // } + const flowVersionCategoryMap = new Map>(); + + const flowIDs: FlowId[] = []; + for (const flowID of flowWithVersion.keys()) { + flowIDs.push(flowID); + } const categoriesRef: Array> = await models.categoryRef.find({ @@ -37,24 +46,34 @@ export class CategoryService { for (const catRef of categoriesRef) { const flowId = catRef.objectID.valueOf(); - if (!categoriesMap.has(flowId)) { - categoriesMap.set(flowId, []); + if (!flowVersionCategoryMap.has(flowId)) { + flowVersionCategoryMap.set(flowId, new Map()); + } + + // Here the key is the versionID of the flow + const flowVersionMap = flowVersionCategoryMap.get(flowId)!; + + const flowVersion = catRef.versionID; + if (!flowVersionMap.has(flowVersion)) { + flowVersionMap.set(flowVersion, []); } - const categoriesPerFlow = categoriesMap.get(flowId)!; + const categoriesPerFlowVersion = flowVersionMap.get(flowVersion)!; const category = categories.find((cat) => cat.id === catRef.categoryID); if ( category && - !categoriesPerFlow.some((cat) => cat.id === category.id.valueOf()) + !categoriesPerFlowVersion.some( + (cat) => cat.id === category.id.valueOf() + ) ) { const mappedCategory = this.mapCategoryToFlowCategory(category, catRef); - categoriesPerFlow.push(mappedCategory); + categoriesPerFlowVersion.push(mappedCategory); } } - return categoriesMap; + return flowVersionCategoryMap; } private mapCategoryToFlowCategory( @@ -79,6 +98,7 @@ export class CategoryService { createdAt: categoryRef.createdAt.toISOString(), updatedAt: categoryRef.updatedAt.toISOString(), }, + versionID: categoryRef.versionID, }; } diff --git a/src/domain-services/categories/graphql/types.ts b/src/domain-services/categories/graphql/types.ts index 2dc367b9..5a1859b4 100644 --- a/src/domain-services/categories/graphql/types.ts +++ b/src/domain-services/categories/graphql/types.ts @@ -41,4 +41,7 @@ export class Category extends BaseType { @Field(() => CategoryRef, { nullable: 
true }) categoryRef: CategoryRef; + + @Field({ nullable: false }) + versionID: number; } diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 43393f72..049ddbd5 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -97,7 +97,18 @@ export class FlowSearchService { flows.pop(); } - const flowIds: FlowId[] = flows.map((flow) => flow.id); + const flowIds: FlowId[] = []; + const flowWithVersion: Map = new Map(); + + // Obtain flow IDs and flow version IDs + for (const flow of flows) { + flowIds.push(flow.id); + if (!flowWithVersion.has(flow.id)) { + flowWithVersion.set(flow.id, []); + } + const flowVersionIDs = flowWithVersion.get(flow.id)!; + flowVersionIDs.push(flow.versionID); + } // Obtain external references and flow objects in parallel const [externalReferencesMap, flowObjects] = await Promise.all([ @@ -137,7 +148,7 @@ export class FlowSearchService { usageYearsMap, reportDetailsMap, ] = await Promise.all([ - this.categoryService.getCategoriesForFlows(flowIds, models), + this.categoryService.getCategoriesForFlows(flowWithVersion, models), this.organizationService.getOrganizationsForFlows( organizationsFO, models @@ -151,7 +162,12 @@ export class FlowSearchService { const items = await Promise.all( flows.map(async (flow) => { const flowLink = flowLinksMap.get(flow.id) ?? []; - const categories = categoriesMap.get(flow.id) ?? []; + + // Categories Map follows the structure: + // flowID: { versionID: [categories]} + // So we need to get the categories for the flow version + const categories = + categoriesMap.get(flow.id)!.get(flow.versionID) ?? []; const organizations = organizationsMap.get(flow.id) ?? []; const locations = locationsMap.get(flow.id) ?? []; const plans = plansMap.get(flow.id) ?? 
[];
From be18378a97b40b62ff87aafbcd12f3b7bc82d757 Mon Sep 17 00:00:00 2001
From: manelcecs 
Date: Fri, 15 Dec 2023 16:25:00 +0100
Subject: [PATCH 42/67] Merge with Temp Commit about sorting - needs refactor but fixes more

---
 .../flows/flow-search-service.ts              | 127 ++++++++++++----
 src/domain-services/flows/model.ts            |   6 +
 .../flow-object-conditions-strategy-impl.ts   |   6 +-
 .../only-flow-conditions-strategy-impl.ts     |   6 +-
 .../flows/strategy/impl/utils.ts              |  12 ++
 5 files changed, 126 insertions(+), 31 deletions(-)

diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts
index 049ddbd5..fbf8f946 100644
--- a/src/domain-services/flows/flow-search-service.ts
+++ b/src/domain-services/flows/flow-search-service.ts
@@ -33,7 +33,11 @@ import {
   type FlowSearchTotalAmountResult,
   type FlowSortField,
 } from './graphql/types';
-import { type FlowEntity, type FlowOrderBy } from './model';
+import {
+  type FlowEntity,
+  type FlowNestedDirection,
+  type FlowOrderBy,
+} from './model';
 import { type FlowSearchStrategy } from './strategy/flow-search-strategy';
 import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy-impl';
 import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy-impl';
@@ -75,8 +79,7 @@ export class FlowSearchService {
     const { strategy, conditions } = this.determineStrategy(
       flowFilters,
       flowObjectFilters,
-      flowCategoryFilters,
-      orderBy
+      flowCategoryFilters
     );
 
     // Fetch one more item to check for hasNextPage
@@ -225,20 +228,87 @@ export class FlowSearchService {
     // Sort items
     // FIXME: this sorts the page, not the whole result set
     items.sort((a: Flow, b: Flow) => {
-      const nestedA = a[orderBy.entity as keyof Flow];
-      const nestedB = b[orderBy.entity as keyof Flow];
+      const entityKey = orderBy.entity as keyof Flow;
+
+      const nestedA = a[entityKey];
+      const nestedB = b[entityKey];
 
       if (nestedA && nestedB) {
+        if (orderBy.direction) {
+          // This means the orderBy came in the format:
+          // column: 'nestedEntity.direction.property'
+          // So we need to get the entry of the nested entity
+          // whose direction matches the orderBy direction
+          // and sort by the property using the orderBy order
+
+          // First, check if the nestedEntity is truly an Array
+          if (!Array.isArray(nestedA)) {
+            return 0;
+          }
+          if (!Array.isArray(nestedB)) {
+            return 0;
+          }
+
+          // Now we ensure both properties are arrays
+          // we can assume that the nestedEntity is one of the following:
+          // organizations, locations, plans, usageYears
+          const directionEntityA = nestedA as unknown as
+            | Organization[]
+            | BaseLocation[]
+            | BasePlan[]
+            | UsageYear[];
+          const directionEntityB = nestedB as unknown as
+            | Organization[]
+            | BaseLocation[]
+            | BasePlan[]
+            | UsageYear[];
+
+          // Then we find the entry of the nestedEntity that matches the orderBy direction
+          const nestedEntityA = directionEntityA.find(
+            (nestedEntity: any) => orderBy.direction === nestedEntity.direction
+          );
+          const nestedEntityB = directionEntityB.find(
+            (nestedEntity: any) => orderBy.direction === nestedEntity.direction
+          );
+
+          // After, we need to check that there is an entry that matches the orderBy direction
+          // if not, we return 0
+          if (!nestedEntityA) {
+            return 0;
+          }
+          if (!nestedEntityB) {
+            return 0;
+          }
+
+          // Now we can sort by the property using the orderBy order
+          const propertyA =
+            nestedEntityA[orderBy.column as keyof typeof nestedEntityA];
+          const propertyB =
+            nestedEntityB[orderBy.column as keyof typeof nestedEntityB];
+
+          // 
Finally, we check that the property is defined + // and if so - we sort by the property using the orderBy order + if (propertyA && propertyB) { + if (orderBy.order === 'asc') { + return propertyA > propertyB ? 1 : -1; + } + return propertyA < propertyB ? 1 : -1; + } + } + // Since there is no direction expecified in the orderBy + // we can assume that the nestedEntity is one of the following: + // childIDs, parentIDs, externalReferences, reportDetails, parkedParentSource, categories + // and we can sort by the property using the orderBy order const propertyA = nestedA[orderBy.column as keyof typeof nestedA]; const propertyB = nestedB[orderBy.column as keyof typeof nestedB]; - // Implement your custom comparison logic - // For example, compare strings or numbers - if (propertyA < propertyB) { - return orderBy.order === 'asc' ? -1 : 1; - } - if (propertyA > propertyB) { - return orderBy.order === 'asc' ? 1 : -1; + // Finally, we check that the property is defined + // and if so - we sort by the property using the orderBy order + if (propertyA && propertyB) { + if (orderBy.order === 'asc') { + return propertyA > propertyB ? 1 : -1; + } + return propertyA < propertyB ? 1 : -1; } } @@ -283,15 +353,26 @@ export class FlowSearchService { const orderBy: FlowOrderBy = { column: sortField ?? 'updatedAt', order: sortOrder ?? ('desc' as SortOrder), + direction: null, entity: 'flow', }; // Check if sortField is a nested property if (orderBy.column.includes('.')) { - const [nestedEntity, propertyToSort] = orderBy.column.split('.'); - // Update orderBy object with nested information - orderBy.column = propertyToSort; - orderBy.entity = nestedEntity; + // OrderBy can came in the format: + // column: 'organizations.source.name' + // or in the format: + // column: 'flow.updatedAt' + const struct = orderBy.column.split('.'); + + if (struct.length === 2) { + orderBy.column = struct[0]; + orderBy.entity = struct[1]; + } else if (struct.length === 3) { + orderBy.column = struct[0]; + orderBy.direction = struct[1] as FlowNestedDirection; + orderBy.entity = struct[2]; + } } return orderBy; @@ -351,8 +432,7 @@ export class FlowSearchService { determineStrategy( flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], - flowCategoryFilters: FlowCategoryFilters, - orderBy?: FlowOrderBy + flowCategoryFilters: FlowCategoryFilters ): { strategy: FlowSearchStrategy; conditions: any } { const isFlowFilterDefined = flowFilters !== undefined; const isFlowObjectFilterDefined = flowObjectFilters !== undefined; @@ -361,8 +441,6 @@ export class FlowSearchService { const isFlowCategoryFilterDefined = flowCategoryFilters !== undefined; - const isOrderByForFlows = orderBy?.entity === 'flow'; - if ( (!isFlowFilterDefined && (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty) && @@ -372,15 +450,6 @@ export class FlowSearchService { !isFlowCategoryFilterDefined) ) { const flowConditions = this.prepareFlowConditions(flowFilters); - if (!isOrderByForFlows) { - return { - strategy: this.flowObjectFiltersStrategy, - conditions: { - conditionsMap: this.buildConditionsMap(flowConditions, {}), - flowCategoryFilters, - }, - }; - } return { strategy: this.onlyFlowFiltersStrategy, conditions: flowConditions, diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index 4a1eb492..657ace36 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -7,4 +7,10 @@ export type FlowOrderBy = { column: string; order: SortOrder; entity: string; + direction: 
FlowNestedDirection | null; }; + +export enum FlowNestedDirection { + source = 'source', + destination = 'destination', +} diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts index 2a458701..02653f11 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts @@ -13,6 +13,7 @@ import { } from '../flowID-search-strategy'; import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; import { GetFlowIdsFromMixedConditionsStrategyImpl } from './get-flowIds-flow-mixed-conditions-strategy-impl'; +import { checkAndMapFlowOrderBy } from './utils'; @Service() export class FlowObjectFiltersStrategy implements FlowSearchStrategy { @@ -82,10 +83,13 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { ], }; + // check and map orderBy to be from entity 'flow' + const orderByFlow = checkAndMapFlowOrderBy(orderBy); + // Obtain flows and flowCount based on flowIDs from filtered flowObjects // and flow conditions const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, searchConditions, orderBy, limit), + this.flowService.getFlows(models, searchConditions, orderByFlow, limit), this.flowService.getFlowsCount(models, countConditions), ]); diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts index f21793c0..53f329ce 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts @@ -6,6 +6,7 @@ import { type FlowSearchStrategy, type FlowSearchStrategyResponse, } from '../flow-search-strategy'; +import { checkAndMapFlowOrderBy } from './utils'; @Service() export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { @@ -23,8 +24,11 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { [Cond.AND]: [flowConditions ?? {}, cursorCondition ?? 
{}], }; + // check and map orderBy to be from entity 'flow' + const orderByFlow = checkAndMapFlowOrderBy(orderBy); + const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, searchConditions, orderBy, limit), + this.flowService.getFlows(models, searchConditions, orderByFlow, limit), this.flowService.getFlowsCount(models, flowConditions), ]); diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index 16fa0e84..caf23b31 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -142,3 +142,15 @@ export function mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( flowIDsFromFilteredFlowCategories.includes(flowID) ); } + +export function checkAndMapFlowOrderBy(orderBy: any) { + if (!orderBy) { + return { column: 'updatedAt', order: 'DESC' }; + } + let orderByForFlow = { column: orderBy.column, order: orderBy.order }; + if (orderBy.entity !== 'flow') { + orderByForFlow = { column: 'updatedAt', order: 'DESC' }; + } + + return orderByForFlow; +} From 55f13d326e3df762ddfa1113d3f0f24b45e72796 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 15 Dec 2023 17:14:00 +0100 Subject: [PATCH 43/67] Move 'pending' flow category as shortcut --- .../flows/flow-search-service.ts | 55 +++++++++++++++---- src/domain-services/flows/graphql/args.ts | 13 +++-- .../flows/strategy/flow-search-strategy.ts | 3 +- .../flows/strategy/flowID-search-strategy.ts | 11 +++- .../flow-object-conditions-strategy-impl.ts | 54 ++++++++++-------- ...-flow-category-conditions-strategy-impl.ts | 11 ++-- ...Ids-flow-mixed-conditions-strategy-impl.ts | 8 ++- .../flows/strategy/impl/utils.ts | 11 ++-- 8 files changed, 109 insertions(+), 57 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index fbf8f946..9e79b702 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -19,7 +19,7 @@ import { ReportDetailService } from '../report-details/report-detail-service'; import { type UsageYear } from '../usage-years/grpahql/types'; import { UsageYearService } from '../usage-years/usage-year-service'; import { - type FlowCategoryFilters, + type FlowCategory, type FlowObjectFilters, type SearchFlowsArgs, type SearchFlowsArgsNonPaginated, @@ -62,8 +62,14 @@ export class FlowSearchService { models: Database, filters: SearchFlowsArgs ): Promise { - const { limit, nextPageCursor, prevPageCursor, sortField, sortOrder } = - filters; + const { + limit, + nextPageCursor, + prevPageCursor, + sortField, + sortOrder, + pending: isPendingFlows, + } = filters; const orderBy: FlowOrderBy = this.buildOrderBy(sortField, sortOrder); @@ -79,7 +85,8 @@ export class FlowSearchService { const { strategy, conditions } = this.determineStrategy( flowFilters, flowObjectFilters, - flowCategoryFilters + flowCategoryFilters, + isPendingFlows ); // Fetch one more item to check for hasNextPage @@ -91,7 +98,8 @@ export class FlowSearchService { models, orderBy, limitComputed, - cursorCondition + cursorCondition, + isPendingFlows ); // Remove the extra item used to check hasNextPage @@ -432,7 +440,8 @@ export class FlowSearchService { determineStrategy( flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], - flowCategoryFilters: FlowCategoryFilters + flowCategoryFilters: FlowCategory[], + isFilterByPendingFlows: boolean ): { strategy: FlowSearchStrategy; conditions: any } { 
const isFlowFilterDefined = flowFilters !== undefined; const isFlowObjectFilterDefined = flowObjectFilters !== undefined; @@ -440,21 +449,30 @@ export class FlowSearchService { isFlowObjectFilterDefined && flowObjectFilters.length !== 0; const isFlowCategoryFilterDefined = flowCategoryFilters !== undefined; + const isFlowCategoryFilterNotEmpty = + isFlowCategoryFilterDefined && flowCategoryFilters.length !== 0; + const isFilterByPendingFlowsDefined = isFilterByPendingFlows !== undefined; if ( (!isFlowFilterDefined && (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty) && - !isFlowCategoryFilterDefined) || + !isFlowCategoryFilterNotEmpty && + !isFilterByPendingFlowsDefined) || (isFlowFilterDefined && (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty) && - !isFlowCategoryFilterDefined) + !isFlowCategoryFilterNotEmpty && + !isFilterByPendingFlowsDefined) ) { const flowConditions = this.prepareFlowConditions(flowFilters); return { strategy: this.onlyFlowFiltersStrategy, conditions: flowConditions, }; - } else if (isFlowObjectFiltersNotEmpty || isFlowCategoryFilterDefined) { + } else if ( + isFlowObjectFiltersNotEmpty || + isFlowCategoryFilterNotEmpty || + isFilterByPendingFlowsDefined + ) { const flowConditions = this.prepareFlowConditions(flowFilters); const flowObjectConditions = this.prepareFlowObjectConditions(flowObjectFilters); @@ -632,15 +650,28 @@ export class FlowSearchService { models: Database, args: SearchFlowsArgsNonPaginated ): Promise { - const { flowFilters, flowObjectFilters, flowCategoryFilters } = args; + const { + flowFilters, + flowObjectFilters, + flowCategoryFilters, + pending: isPendingFlows, + } = args; const { strategy, conditions } = this.determineStrategy( flowFilters, flowObjectFilters, - flowCategoryFilters + flowCategoryFilters, + isPendingFlows ); - const { flows, count } = await strategy.search(conditions, models); + const { flows, count } = await strategy.search( + conditions, + models, + undefined, + undefined, + undefined, + isPendingFlows + ); const flowsAmountUSD: Array = flows.map( (flow) => flow.amountUSD diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index b58bfd69..9b142db6 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -31,9 +31,6 @@ export class SearchFlowsFilters { @InputType() export class FlowCategoryFilters { - @Field({ nullable: true }) - pending: boolean; - @Field(() => [FlowCategory], { nullable: true }) categoryFilters: FlowCategory[]; } @@ -81,8 +78,11 @@ export class SearchFlowsArgs extends PaginationArgs { @Field({ nullable: true }) includeChildrenOfParkedFlows: boolean; + @Field(() => [FlowCategory], { nullable: true }) + flowCategoryFilters: FlowCategory[]; + @Field({ nullable: true }) - flowCategoryFilters: FlowCategoryFilters; + pending: boolean; } @ArgsType() @@ -96,6 +96,9 @@ export class SearchFlowsArgsNonPaginated { @Field({ nullable: true }) includeChildrenOfParkedFlows: boolean; + @Field(() => [FlowCategory], { nullable: true }) + flowCategoryFilters: FlowCategory[]; + @Field({ nullable: true }) - flowCategoryFilters: FlowCategoryFilters; + pending: boolean; } diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index c2cb3a19..189a2758 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -14,6 +14,7 @@ export interface FlowSearchStrategy 
{ models: Database, orderBy?: any, limit?: number, - cursorCondition?: any + cursorCondition?: any, + filterByPendingFlows?: boolean ): Promise; } diff --git a/src/domain-services/flows/strategy/flowID-search-strategy.ts b/src/domain-services/flows/strategy/flowID-search-strategy.ts index 4a721325..e63d09a3 100644 --- a/src/domain-services/flows/strategy/flowID-search-strategy.ts +++ b/src/domain-services/flows/strategy/flowID-search-strategy.ts @@ -1,6 +1,6 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { type FlowCategoryFilters } from '../graphql/args'; +import { type FlowCategory } from '../graphql/args'; export interface FlowIdSearchStrategyResponse { flowIDs: FlowId[]; @@ -10,8 +10,13 @@ export interface FlowIDSearchStrategy { search( models: Database, flowObjectsConditions: Map>, - flowCategoryConditions: FlowCategoryFilters + flowCategoryConditions: FlowCategory[], + filterByPendingFlows?: boolean ): Promise; - generateWhereClause(flowIds: FlowId[], conditions: any): any; + generateWhereClause( + flowIds: FlowId[], + conditions: any, + filterByPendingFlows?: boolean + ): any; } diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts index 02653f11..dec4de19 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts @@ -2,7 +2,7 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; -import { type FlowCategoryFilters } from '../../graphql/args'; +import { type FlowCategory } from '../../graphql/args'; import { type FlowSearchStrategy, type FlowSearchStrategyResponse, @@ -27,12 +27,13 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { async search( flowConditions: { conditionsMap: Map; - flowCategoryFilters: FlowCategoryFilters; + flowCategoryFilters: FlowCategory[]; }, models: Database, orderBy?: any, limit?: number, - cursorCondition?: any + cursorCondition?: any, + filterByPendingFlows?: boolean ): Promise { const flowConditionsMap = flowConditions.conditionsMap; // Obtain flowObjects conditions @@ -45,30 +46,31 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { const flowEntityConditions = flowConditionsMap.get('flow') ?? new Map(); // Obtain flowCategory conditions - const flowCategoryConditions = flowConditions.flowCategoryFilters ?? {}; + const flowCategoryConditions = flowConditions.flowCategoryFilters ?? []; const searchFlowIdsStrategy: FlowIDSearchStrategy = this.determineStrategy( flowObjectsConditions, - flowCategoryConditions + flowCategoryConditions, + filterByPendingFlows ); const { flowIDs: flowIdsToFilter }: FlowIdSearchStrategyResponse = await searchFlowIdsStrategy.search( models, flowObjectsConditions, - flowCategoryConditions + flowCategoryConditions, + filterByPendingFlows ); + const whereClauseFromStrategy = searchFlowIdsStrategy.generateWhereClause( + flowIdsToFilter, + flowCategoryConditions, + filterByPendingFlows + ); + // Combine conditions from flowObjects FlowIDs and flow conditions const countConditions = { - [Cond.AND]: [ - flowEntityConditions ?? 
{}, - - searchFlowIdsStrategy.generateWhereClause( - flowIdsToFilter, - flowCategoryConditions - ), - ], + [Cond.AND]: [flowEntityConditions ?? {}, whereClauseFromStrategy ?? {}], }; // Combine cursor condition with flow conditions @@ -76,10 +78,7 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { [Cond.AND]: [ flowEntityConditions ?? {}, cursorCondition ?? {}, - searchFlowIdsStrategy.generateWhereClause( - flowIdsToFilter, - flowCategoryConditions - ), + whereClauseFromStrategy ?? {}, ], }; @@ -111,24 +110,31 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { // otherwise keep all flowIDs from the one that is not empty determineStrategy( flowObjectsConditions: Map>, - flowCategoryConditions: any + flowCategoryConditions: any, + filterByPendingFlows?: boolean ): any { const isFlowObjectsConditionsIsDefined = flowObjectsConditions !== undefined; const isFlowCategoryConditionsIsDefined = flowCategoryConditions !== undefined; + const isFilterByPendingFlowsIsDefined = filterByPendingFlows !== undefined; const flowObjectsConditionsIsNotEmpty = isFlowObjectsConditionsIsDefined && flowObjectsConditions.size; - const flowCategoryConditionsIsNotEmpty = - isFlowCategoryConditionsIsDefined && - Object.keys(flowCategoryConditions).length; + const isFlowCategoryConditionsIsNotEmpty = + isFlowCategoryConditionsIsDefined && flowCategoryConditions.length !== 0; - if (flowObjectsConditionsIsNotEmpty && flowCategoryConditionsIsNotEmpty) { + if ( + flowObjectsConditionsIsNotEmpty && + (isFlowCategoryConditionsIsNotEmpty || isFilterByPendingFlowsIsDefined) + ) { return this.getFlowIdsFromMixedConditions; } else if (flowObjectsConditionsIsNotEmpty) { return this.getFlowIdsFromObjectConditions; - } else if (flowCategoryConditionsIsNotEmpty) { + } else if ( + isFlowCategoryConditionsIsNotEmpty || + isFilterByPendingFlowsIsDefined + ) { return this.getFlowIdsFromCategoryConditions; } throw new Error( diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts index 3e95bc83..89339d88 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -5,7 +5,7 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { CategoryService } from '../../../categories/category-service'; -import { type FlowCategoryFilters } from '../../graphql/args'; +import { type FlowCategory } from '../../graphql/args'; import { type FlowIDSearchStrategy, type FlowIdSearchStrategyResponse, @@ -21,9 +21,11 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl async search( models: Database, _flowObjectsConditions: Map>, - flowCategoryConditions: FlowCategoryFilters + flowCategoryConditions: FlowCategory[], + filterByPendingFlows: boolean ): Promise { const whereClause = mapFlowCategoryConditionsToWhereClause( + filterByPendingFlows, flowCategoryConditions ); @@ -55,9 +57,10 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl generateWhereClause( flowIds: FlowId[], - flowCategoryConditions: FlowCategoryFilters + _conditions: any, + filterByPendingFlows: boolean ) { - const operation = flowCategoryConditions.pending ? 
Op.IN : Op.NOT_IN; + const operation = filterByPendingFlows === true ? Op.IN : Op.NOT_IN; return { id: { [operation]: flowIds, diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts index 99ff6626..ff046cc6 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts @@ -2,7 +2,7 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; -import { type FlowCategoryFilters } from '../../graphql/args'; +import { type FlowCategory } from '../../graphql/args'; import { type FlowIDSearchStrategy, type FlowIdSearchStrategyResponse, @@ -23,7 +23,8 @@ export class GetFlowIdsFromMixedConditionsStrategyImpl async search( models: Database, flowObjectsConditions: Map>, - flowCategoryConditions: FlowCategoryFilters + flowCategoryConditions: FlowCategory[], + filterByPendingFlows: boolean ): Promise { const { flowIDs: flowIdsFromFlowObjects }: FlowIdSearchStrategyResponse = await this.getFlowIdsFromObjectConditionsStrategy.search( @@ -35,7 +36,8 @@ export class GetFlowIdsFromMixedConditionsStrategyImpl await this.getFlowIdsFromCategoryConditionsStrategy.search( models, flowObjectsConditions, - flowCategoryConditions + flowCategoryConditions, + filterByPendingFlows ); const mergeFlowIDs: FlowId[] = diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index caf23b31..2db4fcf5 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -1,6 +1,6 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { type FlowCategoryFilters } from '../../graphql/args'; +import { type FlowCategory } from '../../graphql/args'; /* * Map structure: @@ -38,24 +38,25 @@ export function mapFlowObjectConditionsToWhereClause( } export function mapFlowCategoryConditionsToWhereClause( - flowCategoryConditions: FlowCategoryFilters + filterByPendingFlows: boolean, + flowCategoryConditions: FlowCategory[] ) { let whereClause = {}; - if (flowCategoryConditions.pending !== undefined) { + if (filterByPendingFlows !== undefined) { whereClause = { group: 'inactiveReason', name: 'Pending review', }; } - if (flowCategoryConditions.categoryFilters?.length > 0) { + if (flowCategoryConditions.length > 0) { // Map category filters // getting Id when possible // or name and group otherwise const categoryIdFilters: number[] = []; const categoryFilters = new Map(); - for (const categoryFilter of flowCategoryConditions.categoryFilters) { + for (const categoryFilter of flowCategoryConditions) { if (categoryFilter.id) { categoryIdFilters.push(categoryFilter.id); } else if (categoryFilter.group && categoryFilter.name) { From ecbdf0562c2ce47bfd31d6d48bc6acc81834d340 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 19 Dec 2023 09:55:10 +0100 Subject: [PATCH 44/67] Refactor approach --- package.json | 2 +- src/domain-services/Context.ts | 2 + src/domain-services/flow-object/model.ts | 15 + .../flows/flow-search-service.ts | 263 ++++++++++++++- src/domain-services/flows/flow-service.ts | 
46 ++- src/domain-services/flows/graphql/resolver.ts | 6 +- src/domain-services/flows/model.ts | 7 +- .../flows/strategy/flow-search-strategy.ts | 18 + .../flow-object-conditions-strategy-impl.ts | 18 +- ...-flow-category-conditions-strategy-impl.ts | 2 +- .../only-flow-conditions-strategy-impl.ts | 47 ++- .../search-flow-by-filters-strategy-impl.ts | 307 ++++++++++++++++++ .../flows/strategy/impl/utils.ts | 69 +++- yarn.lock | 3 +- 14 files changed, 782 insertions(+), 23 deletions(-) create mode 100644 src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts diff --git a/package.json b/package.json index f347291f..575d43cc 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "lint": "yarn lint-prettier && yarn lint-eslint" }, "dependencies": { - "@unocha/hpc-api-core": "github:UN-OCHA/hpc-api-core#8ca426e47d4a9431484815687096dda84747f0e2", + "@unocha/hpc-api-core": "../hpc-api-core", "apollo-server-hapi": "^3.12.0", "bunyan": "^1.8.15", "class-validator": "^0.14.0", diff --git a/src/domain-services/Context.ts b/src/domain-services/Context.ts index 6b7871a5..b31b774c 100644 --- a/src/domain-services/Context.ts +++ b/src/domain-services/Context.ts @@ -1,5 +1,7 @@ import { type Database } from '@unocha/hpc-api-core/src/db/type'; +import type Knex from 'knex'; export default interface Context { models: Database; + connection: Knex; } diff --git a/src/domain-services/flow-object/model.ts b/src/domain-services/flow-object/model.ts index e8c19d72..7ca369ea 100644 --- a/src/domain-services/flow-object/model.ts +++ b/src/domain-services/flow-object/model.ts @@ -2,3 +2,18 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; export type FlowObject = InstanceOfModel; +export type FlowObjectType = + | 'governingEntity' + | 'plan' + | 'planEntity' + | 'project' + | 'globalCluster' + | 'organization' + | 'emergency' + | 'flow' + | 'location' + | 'anonymizedOrganization' + | 'cluster' + | 'corePlanEntityActivity' + | 'corePlanEntityObjective' + | 'usageYear'; diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 9e79b702..253edf43 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,6 +1,7 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import type Knex from 'knex'; import { Service } from 'typedi'; import { type SortOrder } from '../../utils/graphql/pagination'; import { CategoryService } from '../categories/category-service'; @@ -41,12 +42,14 @@ import { import { type FlowSearchStrategy } from './strategy/flow-search-strategy'; import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy-impl'; import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy-impl'; +import { SearchFlowByFiltersStrategy } from './strategy/impl/search-flow-by-filters-strategy-impl'; @Service() export class FlowSearchService { constructor( private readonly onlyFlowFiltersStrategy: OnlyFlowFiltersStrategy, private readonly flowObjectFiltersStrategy: FlowObjectFiltersStrategy, + private readonly searchFlowByFiltersStrategy: SearchFlowByFiltersStrategy, private readonly organizationService: OrganizationService, private readonly locationService: LocationService, 
private readonly planService: PlanService, @@ -357,11 +360,261 @@ export class FlowSearchService { }; } + async searchV2( + models: Database, + databaseConnection: Knex, + filters: SearchFlowsArgs + ): Promise { + const { limit, nextPageCursor, prevPageCursor, sortField, sortOrder } = + filters; + + const orderBy: FlowOrderBy = this.buildOrderBy(sortField, sortOrder); + + const { + flowFilters, + flowObjectFilters, + flowCategoryFilters, + pending: isPendingFlows, + } = filters; + + // Once we've gathered all the filters, we need to determine the strategy + // to use in order to obtain the flowIDs + const strategy: FlowSearchStrategy = this.determineStrategyV2( + flowFilters, + flowObjectFilters, + flowCategoryFilters, + isPendingFlows, + orderBy + ); + + // Build cursor condition + const cursorCondition = this.buildCursorCondition( + prevPageCursor, + nextPageCursor, + orderBy + ); + + const { flows, count } = await strategy.searchV2( + models, + databaseConnection, + limit, + orderBy, + cursorCondition, + flowFilters, + flowObjectFilters, + flowCategoryFilters, + isPendingFlows + ); + + // Remove the extra item used to check hasNextPage + const hasNextPage = flows.length > limit; + if (hasNextPage) { + flows.pop(); + } + + const flowIds: FlowId[] = []; + const flowWithVersion: Map = new Map(); + + // Obtain flow IDs and flow version IDs + for (const flow of flows) { + flowIds.push(flow.id); + if (!flowWithVersion.has(flow.id)) { + flowWithVersion.set(flow.id, []); + } + const flowVersionIDs = flowWithVersion.get(flow.id)!; + flowVersionIDs.push(flow.versionID); + } + + // Obtain external references and flow objects in parallel + const [externalReferencesMap, flowObjects] = await Promise.all([ + this.externalReferenceService.getExternalReferencesForFlows( + flowIds, + models + ), + this.flowObjectService.getFlowObjectByFlowId(models, flowIds), + ]); + + // Map flow objects to their respective arrays + const organizationsFO: FlowObject[] = []; + const locationsFO: FlowObject[] = []; + const plansFO: FlowObject[] = []; + const usageYearsFO: FlowObject[] = []; + + this.groupByFlowObjectType( + flowObjects, + organizationsFO, + locationsFO, + plansFO, + usageYearsFO + ); + + // Obtain flow links + const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( + flowIds, + models + ); + + // Perform all nested queries in parallel + const [ + categoriesMap, + organizationsMap, + locationsMap, + plansMap, + usageYearsMap, + reportDetailsMap, + ] = await Promise.all([ + this.categoryService.getCategoriesForFlows(flowWithVersion, models), + this.organizationService.getOrganizationsForFlows( + organizationsFO, + models + ), + this.locationService.getLocationsForFlows(locationsFO, models), + this.planService.getPlansForFlows(plansFO, models), + this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), + this.reportDetailService.getReportDetailsForFlows(flowIds, models), + ]); + + const items = await Promise.all( + flows.map(async (flow) => { + const flowLink = flowLinksMap.get(flow.id) ?? []; + + // Categories Map follows the structure: + // flowID: { versionID: [categories]} + // So we need to get the categories for the flow version + const categories = + categoriesMap.get(flow.id)!.get(flow.versionID) ?? []; + const organizations = organizationsMap.get(flow.id) ?? []; + const locations = locationsMap.get(flow.id) ?? []; + const plans = plansMap.get(flow.id) ?? []; + const usageYears = usageYearsMap.get(flow.id) ?? 
[]; + const externalReferences = externalReferencesMap.get(flow.id) ?? []; + const reportDetails = reportDetailsMap.get(flow.id) ?? []; + + const reportDetailsWithChannel = + this.reportDetailService.addChannelToReportDetails( + reportDetails, + categories + ); + + let parkedParentSource: FlowParkedParentSource[] = []; + if (flow.activeStatus && flowLink.length > 0) { + parkedParentSource = await this.getParketParents( + flow, + flowLink, + models + ); + } + + const childIDs: number[] = + (flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.childID.valueOf()) as number[]) ?? []; + + const parentIDs: number[] = + (flowLinksMap + .get(flow.id) + ?.filter( + (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 + ) + .map((flowLink) => flowLink.parentID.valueOf()) as number[]) ?? []; + + return this.buildFlowDTO( + flow, + categories, + organizations, + locations, + plans, + usageYears, + childIDs, + parentIDs, + externalReferences, + reportDetailsWithChannel, + parkedParentSource + ); + }) + ); + + const isOrderByForFlows = orderBy.entity === 'flow'; + const firstItem = items[0]; + const prevPageCursorEntity = isOrderByForFlows + ? firstItem + : firstItem[orderBy.entity as keyof typeof firstItem]; + const prevPageCursorValue = prevPageCursorEntity + ? prevPageCursorEntity[ + orderBy.column as keyof typeof prevPageCursorEntity + ] ?? '' + : ''; + + const lastItem = items.at(-1); + const nextPageCursorEntity = isOrderByForFlows + ? lastItem + : lastItem![orderBy.entity as keyof typeof lastItem]; + const nextPageCursorValue = nextPageCursorEntity + ? nextPageCursorEntity[ + orderBy.column as keyof typeof nextPageCursorEntity + ]?.toString() ?? '' + : ''; + + return { + flows: items, + hasNextPage: limit <= flows.length, + hasPreviousPage: nextPageCursor !== undefined, + prevPageCursor: prevPageCursorValue, + nextPageCursor: nextPageCursorValue, + pageSize: flows.length, + sortField: `${orderBy.entity}.${orderBy.column}` as FlowSortField, + sortOrder: sortOrder ?? 
'desc', + total: count, + }; + } + + determineStrategyV2( + flowFilters: SearchFlowsFilters, + flowObjectFilters: FlowObjectFilters[], + flowCategoryFilters: FlowCategory[], + isPendingFlows: boolean, + orderBy: FlowOrderBy + ) { + // If there are no filters (flowFilters, flowObjectFilters, flowCategoryFilters or pending) + // and there is no sortByEntity (orderBy.entity === 'flow') + // use onlyFlowFiltersStrategy + // If there are no sortByEntity (orderBy.entity === 'flow') + // but flowFilters only + // use onlyFlowFiltersStrategy + const isOrderByEntityFlow = orderBy.entity === 'flow'; + const isFlowFiltersDefined = flowFilters !== undefined; + const isFlowObjectFiltersDefined = flowObjectFilters !== undefined; + const isFlowCategoryFiltersDefined = flowCategoryFilters !== undefined; + const isFilterByPendingFlowsDefined = isPendingFlows !== undefined; + + const isNoFilterDefined = + !isFlowFiltersDefined && + !isFlowObjectFiltersDefined && + !isFlowCategoryFiltersDefined && + !isFilterByPendingFlowsDefined; + const isFlowFiltersOnly = + isFlowFiltersDefined && + !isFlowObjectFiltersDefined && + !isFlowCategoryFiltersDefined && + !isFilterByPendingFlowsDefined; + + if (isOrderByEntityFlow && (isNoFilterDefined || isFlowFiltersOnly)) { + // use onlyFlowFiltersStrategy + return this.onlyFlowFiltersStrategy; + } + + // Otherwise, use flowObjectFiltersStrategy + return this.searchFlowByFiltersStrategy; + } + buildOrderBy(sortField?: FlowSortField, sortOrder?: SortOrder) { const orderBy: FlowOrderBy = { column: sortField ?? 'updatedAt', order: sortOrder ?? ('desc' as SortOrder), - direction: null, + direction: undefined, entity: 'flow', }; @@ -374,12 +627,12 @@ export class FlowSearchService { const struct = orderBy.column.split('.'); if (struct.length === 2) { - orderBy.column = struct[0]; - orderBy.entity = struct[1]; + orderBy.column = struct[1]; + orderBy.entity = struct[0]; } else if (struct.length === 3) { - orderBy.column = struct[0]; + orderBy.column = struct[2]; orderBy.direction = struct[1] as FlowNestedDirection; - orderBy.entity = struct[2]; + orderBy.entity = struct[0]; } } diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 353a8104..e1720408 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -1,5 +1,10 @@ +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; +import type Knex from 'knex'; import { Service } from 'typedi'; +import { type FlowObjectType } from '../flow-object/model'; +import { type FlowOrderBy } from './model'; +import { mapFlowOrderBy } from './strategy/impl/utils'; @Service() export class FlowService { @@ -9,16 +14,55 @@ export class FlowService { models: Database, conditions: any, orderBy?: any, - limit?: number + limit?: number, + rawOrderBy?: string ) { return await models.flow.find({ orderBy, limit, where: conditions, + orderByRaw: rawOrderBy, }); } async getFlowsCount(models: Database, conditions: any) { return await models.flow.count({ where: conditions }); } + + async getFlowIDsFromEntity( + models: Database, + dbConnection: Knex, + orderBy: FlowOrderBy, + limit: number + ): Promise { + const entity = orderBy.entity; + + // Get the entity list + const mappedOrderBy = mapFlowOrderBy(orderBy); + const entityList = await dbConnection + .queryBuilder() + .select('id') + .from(entity) + .orderBy(mappedOrderBy.column, mappedOrderBy.order); + + const entityIDs = 
entityList.map((entity) => entity.id); + + // Get the flowIDs from the entity list + // using the flow-object relation + const entityCondKey = entity.toString() as unknown as FlowObjectType; + + const query = dbConnection + .queryBuilder() + .select('flowID') + .from('flowObject') + .whereIn('objectID', entityIDs) + .andWhere('objectType', entityCondKey) + .andWhere('refDirection', orderBy.direction!) + .orderByRaw(`array_position(ARRAY[${entityIDs.join(',')}], "objectID")`) + .limit(limit); + + const flowIDs = await query; + + return flowIDs.map((flowID) => flowID.flowID); + } } diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 241bb988..a44538e7 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -21,7 +21,11 @@ export default class FlowResolver { @Args(() => SearchFlowsArgs, { validate: false }) args: SearchFlowsArgs ): Promise { - return await this.flowSearchService.search(context.models, args); + return await this.flowSearchService.searchV2( + context.models, + context.connection, + args + ); } @Query(() => FlowSearchTotalAmountResult) diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index 657ace36..450fe360 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -7,10 +7,7 @@ export type FlowOrderBy = { column: string; order: SortOrder; entity: string; - direction: FlowNestedDirection | null; + direction: FlowNestedDirection | undefined; }; -export enum FlowNestedDirection { - source = 'source', - destination = 'destination', -} +export type FlowNestedDirection = 'source' | 'destination'; diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index 189a2758..9dfa94d3 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -1,4 +1,10 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; +import type Knex from 'knex'; +import { + type FlowCategory, + type FlowObjectFilters, + type SearchFlowsFilters, +} from '../graphql/args'; import { type FlowEntity } from '../model'; export interface FlowSearchStrategyResponse { @@ -17,4 +23,16 @@ export interface FlowSearchStrategy { cursorCondition?: any, filterByPendingFlows?: boolean ): Promise; + + searchV2( + models: Database, + databaseConnection: Knex, + limit: number, + orderBy: any, + cursorCondition: any | undefined, + flowFilters: SearchFlowsFilters, + flowObjectFilters: FlowObjectFilters[], + flowCategoryFilters: FlowCategory[], + searchPendingFlows: boolean | undefined + ): Promise; } diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts index dec4de19..234a8ee9 100644 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts @@ -13,7 +13,7 @@ import { } from '../flowID-search-strategy'; import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; import { GetFlowIdsFromMixedConditionsStrategyImpl } from './get-flowIds-flow-mixed-conditions-strategy-impl'; -import { checkAndMapFlowOrderBy } from './utils'; +import { mapFlowOrderBy } from './utils'; @Service() export class 
FlowObjectFiltersStrategy implements FlowSearchStrategy { @@ -83,7 +83,7 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { }; // check and map orderBy to be from entity 'flow' - const orderByFlow = checkAndMapFlowOrderBy(orderBy); + const orderByFlow = mapFlowOrderBy(orderBy); // Obtain flows and flowCount based on flowIDs from filtered flowObjects // and flow conditions @@ -141,4 +141,18 @@ export class FlowObjectFiltersStrategy implements FlowSearchStrategy { 'No strategy found for flowObjectsConditions and flowCategoryConditions' ); } + + searchV2( + _models: Database, + _databaseConnection: any, + _limit: number, + _orderBy: any, + _cursorCondition: any, + _flowFilters: any, + _flowObjectFilters: any, + _flowCategoryFilters: any, + _filterByPendingFlows?: boolean + ): Promise { + throw new Error('Method not implemented.'); + } } diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts index 89339d88..f811deed 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -22,7 +22,7 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl models: Database, _flowObjectsConditions: Map>, flowCategoryConditions: FlowCategory[], - filterByPendingFlows: boolean + filterByPendingFlows: boolean | undefined ): Promise { const whereClause = mapFlowCategoryConditionsToWhereClause( filterByPendingFlows, diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts index 53f329ce..6ca0d548 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts @@ -1,12 +1,23 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; +import type Knex from 'knex'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; +import { + type FlowCategory, + type FlowObjectFilters, + type SearchFlowsFilters, +} from '../../graphql/args'; +import { type FlowOrderBy } from '../../model'; import { type FlowSearchStrategy, type FlowSearchStrategyResponse, } from '../flow-search-strategy'; -import { checkAndMapFlowOrderBy } from './utils'; +import { + mapCountResultToCountObject, + mapFlowOrderBy, + prepareFlowConditions, +} from './utils'; @Service() export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { @@ -25,7 +36,7 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { }; // check and map orderBy to be from entity 'flow' - const orderByFlow = checkAndMapFlowOrderBy(orderBy); + const orderByFlow = mapFlowOrderBy(orderBy); const [flows, countRes] = await Promise.all([ this.flowService.getFlows(models, searchConditions, orderByFlow, limit), @@ -37,4 +48,36 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { return { flows, count: countObject.count }; } + + async searchV2( + models: Database, + _databaseConnection: Knex, + limit: number, + orderBy: FlowOrderBy, + cursorCondition: any | undefined, + flowFilters: SearchFlowsFilters, + _flowObjectFilters: FlowObjectFilters[], + _flowCategoryFilters: FlowCategory[], + _searchPendingFlows: 
boolean | undefined + ): Promise { + // Map flowConditions to where clause + const flowConditions = prepareFlowConditions(flowFilters); + + // Build conditions object + const searchConditions = { + [Cond.AND]: [flowConditions ?? {}, cursorCondition ?? {}], + }; + + const orderByFlow = mapFlowOrderBy(orderBy); + + const [flows, countRes] = await Promise.all([ + this.flowService.getFlows(models, searchConditions, orderByFlow, limit), + this.flowService.getFlowsCount(models, flowConditions), + ]); + + // Map count result query to count object + const countObject = mapCountResultToCountObject(countRes); + + return { flows, count: countObject.count }; + } } diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts new file mode 100644 index 00000000..1a2aa44b --- /dev/null +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -0,0 +1,307 @@ +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import type Knex from 'knex'; +import { Service } from 'typedi'; +import { type FlowService } from '../../flow-service'; +import { + type FlowCategory, + type FlowObjectFilters, + type SearchFlowsFilters, +} from '../../graphql/args'; +import { type FlowOrderBy } from '../../model'; +import { + type FlowSearchStrategy, + type FlowSearchStrategyResponse, +} from '../flow-search-strategy'; +import { type FlowIdSearchStrategyResponse } from '../flowID-search-strategy'; +import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; +import { GetFlowIdsFromObjectConditionsStrategyImpl } from './get-flowIds-flow-object-conditions-strategy-impl'; +import { + mapCountResultToCountObject, + mapFlowObjectConditions, + mapFlowOrderBy, + prepareFlowConditions, +} from './utils'; + +@Service() +export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { + constructor( + private readonly flowService: FlowService, + private readonly getFlowIdsFromCategoryConditions: GetFlowIdsFromCategoryConditionsStrategyImpl, + private readonly getFlowIdsFromObjectConditions: GetFlowIdsFromObjectConditionsStrategyImpl + ) {} + + search( + _flowConditions: any, + _models: Database, + _orderBy?: any, + _limit?: number, + _cursorCondition?: any, + _filterByPendingFlows?: boolean + ): Promise { + throw new Error('Method not implemented.'); + } + + async searchV2( + models: Database, + databaseConnection: Knex, + limit: number, + orderBy: FlowOrderBy, + cursorCondition: any | undefined, + flowFilters: SearchFlowsFilters, + flowObjectFilters: FlowObjectFilters[], + flowCategoryFilters: FlowCategory[], + searchPendingFlows: boolean | undefined + ): Promise { + // First, we need to check if we need to sort by a certain entity + // and if so, we need to map the orderBy to be from that entity + // obtain the entities relation to the flow + // to be able to sort the flows using the entity + const isSortByEntity = orderBy && orderBy.entity !== 'flow'; + + const sortByFlowIDs: FlowId[] = []; + if (isSortByEntity) { + // Get the flowIDs using the orderBy entity + const flowIDsFromSortingEntity: FlowId[] = + await this.flowService.getFlowIDsFromEntity( + models, + databaseConnection, + orderBy, + limit + ); + sortByFlowIDs.push(...flowIDsFromSortingEntity); + } + + // Now we need to 
check if we need to filter by category + // if it's using the shorcut 'pending' + // or if there are any flowCategoryFilters + const isSearchByPendingDefined = searchPendingFlows !== undefined; + + const isFilterByCategory = + isSearchByPendingDefined || flowCategoryFilters?.length > 0; + + const flowIDsFromCategoryFilters: FlowId[] = []; + + if (isFilterByCategory) { + const flowIDsFromCategoryStrategy: FlowIdSearchStrategyResponse = + await this.getFlowIdsFromCategoryConditions.search( + models, + new Map(), + flowCategoryFilters ?? [], + searchPendingFlows + ); + flowIDsFromCategoryFilters.push(...flowIDsFromCategoryStrategy.flowIDs); + } + + // After that, we need to check if we need to filter by flowObjects + // if so, we need to obtain the flowIDs from the flowObjects + const isFilterByFlowObjects = flowObjectFilters?.length > 0; + + const flowIDsFromObjectFilters: FlowId[] = []; + if (isFilterByFlowObjects) { + const flowObjectConditionsMap = + mapFlowObjectConditions(flowObjectFilters); + const flowIDsFromObjectStrategy: FlowIdSearchStrategyResponse = + await this.getFlowIdsFromObjectConditions.search( + models, + flowObjectConditionsMap + ); + flowIDsFromObjectFilters.push(...flowIDsFromObjectStrategy.flowIDs); + } + + // Apply only filter conditions but not cursor conditions + let countConditions = {}; + + // Combine cursor condition with flow conditions + let searchConditions = { ...cursorCondition }; + + // Check if we have flowIDs from flowObjects and flowCategoryFilters + // if so, we need to filter by those flowIDs + if (isFilterByFlowObjects && isFilterByCategory) { + if (searchPendingFlows === true) { + // We need to combine the flowIDs from flowObjects and flowCategoryFilters + // to have the least amount of flowIDs to filter + const setOfFlowIDs = new Set([ + ...flowIDsFromCategoryFilters, + ...flowIDsFromObjectFilters, + ]); + const flowIDsFromFilteredFlowObjects = [...setOfFlowIDs]; + searchConditions = { + ...searchConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromFilteredFlowObjects, + }, + }, + ], + }; + countConditions = { + ...countConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromFilteredFlowObjects, + }, + }, + ], + }; + } else { + searchConditions = { + ...searchConditions, + [Cond.AND]: [ + { + id: { + [Op.NOT_IN]: flowIDsFromCategoryFilters, + [Op.IN]: flowIDsFromObjectFilters, + }, + }, + ], + }; + countConditions = { + ...countConditions, + [Cond.AND]: [ + { + id: { + [Op.NOT_IN]: flowIDsFromCategoryFilters, + [Op.IN]: flowIDsFromObjectFilters, + }, + }, + ], + }; + } + } else if (isFilterByCategory && isSearchByPendingDefined) { + if (searchPendingFlows === true) { + searchConditions = { + ...searchConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromCategoryFilters, + }, + }, + ], + }; + countConditions = { + ...countConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromCategoryFilters, + }, + }, + ], + }; + } else { + searchConditions = { + ...searchConditions, + [Cond.AND]: [ + { + id: { + [Op.NOT_IN]: flowIDsFromCategoryFilters, + }, + }, + ], + }; + countConditions = { + ...countConditions, + [Cond.AND]: [ + { + id: { + [Op.NOT_IN]: flowIDsFromCategoryFilters, + }, + }, + ], + }; + } + } else if (isFilterByCategory && !isSearchByPendingDefined) { + searchConditions = { + ...searchConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromCategoryFilters, + }, + }, + ], + }; + countConditions = { + ...countConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromCategoryFilters, + }, + }, + ], + }; + } 
else if (isFilterByFlowObjects) { + searchConditions = { + ...searchConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromObjectFilters, + }, + }, + ], + }; + countConditions = { + ...countConditions, + [Cond.AND]: [ + { + id: { + [Op.IN]: flowIDsFromObjectFilters, + }, + }, + ], + }; + } + + // After adding the where clauses form the filters + // we need to add the conditions from the flow entity filters + // if there are any + if (flowFilters) { + // Map flowConditions to where clause + const flowConditions = prepareFlowConditions(flowFilters); + + // Combine conditions from flowObjects FlowIDs and flow conditions + countConditions = { + ...countConditions, + [Cond.AND]: [flowConditions ?? {}], + }; + + // Combine cursor condition with flow conditions + searchConditions = { + ...searchConditions, + [Cond.AND]: [flowConditions ?? {}], + }; + } + + let rawOrderBy = undefined; + let orderByFlow = undefined; + if (isSortByEntity) { + rawOrderBy = `array_position(ARRAY[${sortByFlowIDs.join(',')}], "id")`; + } else { + orderByFlow = mapFlowOrderBy(orderBy); + } + + // Temporal + const [flows, countRes] = await Promise.all([ + this.flowService.getFlows( + models, + searchConditions, + orderByFlow, + limit, + rawOrderBy + ), + this.flowService.getFlowsCount(models, countConditions), + ]); + + // Map count result query to count object + const countObject = mapCountResultToCountObject(countRes); + + return { flows, count: countObject.count }; + } +} diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index 2db4fcf5..5848ae8a 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -1,6 +1,10 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { type FlowCategory } from '../../graphql/args'; +import { + type FlowCategory, + type FlowObjectFilters, + type SearchFlowsFilters, +} from '../../graphql/args'; /* * Map structure: @@ -38,7 +42,7 @@ export function mapFlowObjectConditionsToWhereClause( } export function mapFlowCategoryConditionsToWhereClause( - filterByPendingFlows: boolean, + filterByPendingFlows: boolean | undefined, flowCategoryConditions: FlowCategory[] ) { let whereClause = {}; @@ -144,7 +148,7 @@ export function mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( ); } -export function checkAndMapFlowOrderBy(orderBy: any) { +export function mapFlowOrderBy(orderBy: any) { if (!orderBy) { return { column: 'updatedAt', order: 'DESC' }; } @@ -155,3 +159,62 @@ export function checkAndMapFlowOrderBy(orderBy: any) { return orderByForFlow; } + +export function prepareFlowConditions(flowFilters: SearchFlowsFilters): any { + let flowConditions = {}; + + if (flowFilters) { + for (const [key, value] of Object.entries(flowFilters)) { + if (value !== undefined) { + if (Array.isArray(value) && value.length !== 0) { + flowConditions = { ...flowConditions, [key]: { [Op.IN]: value } }; + } else { + flowConditions = { ...flowConditions, [key]: value }; + } + } + } + } + + return flowConditions; +} + +export function mapCountResultToCountObject(countRes: any[]) { + // Map count result query to count object + const countObject = countRes[0] as { count: number }; + + return countObject; +} + +export function mapFlowObjectConditions( + flowObjectFilters: FlowObjectFilters[] = [] +): Map> { + const flowObjectsConditions: Map> = new Map< + string, + Map + >(); + + for (const 
flowObjectFilter of flowObjectFilters) { + const { objectType, direction, objectID } = flowObjectFilter; + + if (!flowObjectsConditions.has(objectType)) { + flowObjectsConditions.set(objectType, new Map()); + } + + const refDirectionMap = flowObjectsConditions.get(objectType); + if (!refDirectionMap!.has(direction)) { + refDirectionMap!.set(direction, []); + } + + const objectIDsArray = refDirectionMap!.get(direction); + + if (objectIDsArray!.includes(objectID)) { + throw new Error( + `Duplicate flow object filter: ${objectType} ${direction} ${objectID}` + ); + } + + objectIDsArray!.push(objectID); + } + + return flowObjectsConditions; +} diff --git a/yarn.lock b/yarn.lock index 746df1f4..581d2d66 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1624,9 +1624,8 @@ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@unocha/hpc-api-core@github:UN-OCHA/hpc-api-core#8ca426e47d4a9431484815687096dda84747f0e2": +"@unocha/hpc-api-core@../hpc-api-core": version "7.0.0" - resolved "https://codeload.github.com/UN-OCHA/hpc-api-core/tar.gz/8ca426e47d4a9431484815687096dda84747f0e2" dependencies: "@types/lodash" "^4.14.194" "@types/node-fetch" "2.6.3" From 4d8e0c5253b356c9c36d1629e54ff5542a8996e6 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 22 Dec 2023 10:31:17 +0100 Subject: [PATCH 45/67] Add 'restricted' flow filter --- src/domain-services/flows/graphql/args.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 9b142db6..05f5f8ed 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -27,6 +27,9 @@ export class SearchFlowsFilters { @Field({ name: 'legacyID', nullable: true }) legacyId: number; + + @Field({ nullable: true }) + restricted: boolean; } @InputType() From 74ec5955525b448d54d4fffc5fe690e80e42ad1e Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 22 Dec 2023 10:48:56 +0100 Subject: [PATCH 46/67] Return nested collections by direction Fix repeated criteria --- .../flows/flow-search-service.ts | 33 +++++++++++++++++++ src/domain-services/flows/graphql/types.ts | 29 ++++++++++++++++ .../location/location-service.ts | 7 +++- src/domain-services/plans/plan-service.ts | 8 ++++- .../usage-years/usage-year-service.ts | 8 ++++- 5 files changed, 82 insertions(+), 3 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 253edf43..23e9099b 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -896,9 +896,42 @@ export class FlowSearchService { externalReferences, reportDetails, parkedParentSource, + + // Separate nested fields by source and destination + // Source + sourceUsageYears: this.mapNestedPropertyByDirection(usageYears, 'source'), + sourceLocations: this.mapNestedPropertyByDirection(locations, 'source'), + sourcePlans: this.mapNestedPropertyByDirection(plans, 'source'), + sourceOrganizations: this.mapNestedPropertyByDirection( + organizations, + 'source' + ), + // Destination + destinationUsageYears: this.mapNestedPropertyByDirection( + usageYears, + 'destination' + ), + destinationLocations: this.mapNestedPropertyByDirection( + locations, + 'destination' + ), + destinationPlans: this.mapNestedPropertyByDirection(plans, 
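+ // For example, assuming locations [{ id: 1, direction: 'source' }, { id: 2, direction: 'destination' }],
+ // mapNestedPropertyByDirection(locations, 'source') keeps only the first entry, so each
+ // source*/destination* field is just a direction-filtered view of the combined list.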
'destination'), + destinationOrganizations: this.mapNestedPropertyByDirection( + organizations, + 'destination' + ), }; } + private mapNestedPropertyByDirection( + nestedProperty: any[], + direction: string + ) { + return nestedProperty.filter( + (nestedProperty) => nestedProperty.direction === direction + ); + } + async searchTotalAmount( models: Database, args: SearchFlowsArgsNonPaginated diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 30964bba..261706c4 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -64,18 +64,47 @@ export class Flow extends BaseFlow { @Field(() => [Category], { nullable: false }) categories: Category[]; + // Organizations @Field(() => [Organization], { nullable: false }) organizations: Organization[]; + @Field(() => [Organization], { nullable: false }) + sourceOrganizations: Organization[]; + + @Field(() => [Organization], { nullable: false }) + destinationOrganizations: Organization[]; + + // Plans @Field(() => [BasePlan], { nullable: false }) plans: BasePlan[]; + @Field(() => [BasePlan], { nullable: false }) + sourcePlans: BasePlan[]; + + @Field(() => [BasePlan], { nullable: false }) + destinationPlans: BasePlan[]; + + // Locations @Field(() => [BaseLocation], { nullable: false }) locations: BaseLocation[]; + @Field(() => [BaseLocation], { nullable: false }) + sourceLocations: BaseLocation[]; + + @Field(() => [BaseLocation], { nullable: false }) + destinationLocations: BaseLocation[]; + + // UsageYears @Field(() => [UsageYear], { nullable: false }) usageYears: UsageYear[]; + @Field(() => [UsageYear], { nullable: false }) + sourceUsageYears: UsageYear[]; + + @Field(() => [UsageYear], { nullable: false }) + destinationUsageYears: UsageYear[]; + + // Nested fields @Field(() => [Number], { nullable: false }) childIDs: number[]; diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index 04274ee6..c571ac9c 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -58,7 +58,12 @@ export class LocationService { if (location) { const locationsPerFlow = locationsMap.get(flowId)!; - if (!locationsPerFlow.some((loc) => loc.id === location.id)) { + if ( + !locationsPerFlow.some( + (loc) => + loc.id === location.id && loc.direction === locFO.refDirection + ) + ) { const locationMapped = this.mapLocationsToFlowLocations( location, locFO diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index 3796566e..2f162110 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -87,7 +87,13 @@ export class PlanService { const plansPerFlow = plansMap.get(flowId)!; - if (!plansPerFlow.some((plan) => plan.id === plan.id)) { + if ( + !plansPerFlow.some( + (plan) => + plan.id === plan.id && + plan.direction === planFlowObject.refDirection + ) + ) { const planMapped = this.mapPlansToFlowPlans( plan, planVersion[0], diff --git a/src/domain-services/usage-years/usage-year-service.ts b/src/domain-services/usage-years/usage-year-service.ts index e903f4d2..471994aa 100644 --- a/src/domain-services/usage-years/usage-year-service.ts +++ b/src/domain-services/usage-years/usage-year-service.ts @@ -32,7 +32,13 @@ export class UsageYearService { if (usageYear) { const usageYearsPerFlow = usageYearsMap.get(flowId)!; - if (!usageYearsPerFlow.some((uYear) => uYear.year === 
usageYear.year)) { + if ( + !usageYearsPerFlow.some( + (uYear) => + uYear.year === usageYear.year && + uYear.direction === usageYearFO.refDirection + ) + ) { const usageYearMapped = this.mapUsageYearsToFlowUsageYears( usageYear, usageYearFO.refDirection From 124a9eb6f7eb27877638210bfd6521430d59cb1d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 22 Dec 2023 13:38:05 +0100 Subject: [PATCH 47/67] Temp: parked parent organization --- .../flows/flow-search-service.ts | 103 +++++++++++++----- src/domain-services/flows/graphql/types.ts | 4 +- 2 files changed, 77 insertions(+), 30 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 23e9099b..b8e0e7f1 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -1,6 +1,7 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import type Knex from 'knex'; import { Service } from 'typedi'; import { type SortOrder } from '../../utils/graphql/pagination'; @@ -195,7 +196,7 @@ export class FlowSearchService { categories ); - let parkedParentSource: FlowParkedParentSource[] = []; + let parkedParentSource: FlowParkedParentSource | null = null; if (flow.activeStatus && flowLink.length > 0) { parkedParentSource = await this.getParketParents( flow, @@ -496,8 +497,10 @@ export class FlowSearchService { categories ); - let parkedParentSource: FlowParkedParentSource[] = []; - if (flow.activeStatus && flowLink.length > 0) { + let parkedParentSource: FlowParkedParentSource | null = null; + const shouldLookAfterParentSource = + flow.activeStatus && flowLink.length > 0; + if (shouldLookAfterParentSource) { parkedParentSource = await this.getParketParents( flow, flowLink, @@ -772,47 +775,91 @@ export class FlowSearchService { } } + // TODO: refactor this method + // Move to a proper service and simplify the logic + // and the queries private async getParketParents( flow: any, flowLink: any[], models: Database - ): Promise { - const flowLinksDepth0 = flowLink.filter((flowLink) => flowLink.depth === 0); - - const flowLinksParent = flowLinksDepth0.filter( - (flowLink) => flowLink.parentID === flow.id - ); + ): Promise { + const flowLinksParentsIDs = flowLink + .filter( + (flowLink) => + flowLink.parentID !== flow.id && flowLink.childID === flow.id + ) + .map((flowLink) => flowLink.parentID.valueOf()); + + if (flowLinksParentsIDs.length === 0) { + return null; + } - const categories = await models.category.find({ + const parkedCategory = await models.category.findOne({ where: { group: 'flowType', - name: 'parked', + name: 'Parked', }, }); - const categoriesIDs = categories.map((category) => category.id); + const parentFlows: number[] = []; + + for (const flowLinkParentID of flowLinksParentsIDs) { + const parkedParentCategoryRef = await models.categoryRef.find({ + where: { + categoryID: parkedCategory?.id, + versionID: flow.versionID, + objectID: flowLinkParentID, + objectType: 'flow', + }, + }); + + if (parkedParentCategoryRef && parkedParentCategoryRef.length > 0) { + parentFlows.push(flowLinkParentID); + } + } - const categoryRef = await models.categoryRef.find({ + const parkedParentFlowObjectsOrganizationSource = []; + + for (const parentFlow of parentFlows) { + const parkedParentOrganizationFlowObject = 
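+ // At this point parentFlows only holds the parents confirmed as 'Parked' via categoryRef;
+ // each iteration fetches that parent's source organization flow object
+ // (objectType 'organization', refDirection 'source'), and the collected objectIDs are
+ // resolved to organizations in a single query after the loop.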
+ await models.flowObject.findOne({ + where: { + flowID: createBrandedValue(parentFlow), + objectType: 'organization', + refDirection: 'source', + versionID: flow.versionID, + }, + }); + parkedParentFlowObjectsOrganizationSource.push( + parkedParentOrganizationFlowObject + ); + } + + const parkedParentOrganizations = await models.organization.find({ where: { - categoryID: { - [Op.IN]: categoriesIDs, + id: { + [Op.IN]: parkedParentFlowObjectsOrganizationSource.map((flowObject) => + createBrandedValue(flowObject?.objectID) + ), }, - versionID: flow.versionID, }, }); - const parentFlows = flowLinksParent - .filter((flowLink) => { - return categoryRef.some( - (categoryRef) => - categoryRef.objectID.valueOf() === flowLink.parentID.valueOf() - ); - }) - .map((flowLink) => { - return flowLink.parentID.valueOf(); - }); + const mappedParkedParentOrganizations: FlowParkedParentSource = { + organization: [], + orgName: [], + }; + + for (const parkedParentOrganization of parkedParentOrganizations) { + mappedParkedParentOrganizations.organization.push( + parkedParentOrganization.id.valueOf() + ); + mappedParkedParentOrganizations.orgName.push( + parkedParentOrganization.name + ); + } - return parentFlows; + return mappedParkedParentOrganizations; } private buildCursorCondition( @@ -872,7 +919,7 @@ export class FlowSearchService { parentIDs: number[], externalReferences: any[], reportDetails: any[], - parkedParentSource: FlowParkedParentSource[] + parkedParentSource: FlowParkedParentSource | null ): Flow { return { // Mandatory fields diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 261706c4..447b84db 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -123,8 +123,8 @@ export class Flow extends BaseFlow { @Field(() => [ReportDetail], { nullable: false }) reportDetails: ReportDetail[]; - @Field(() => [FlowParkedParentSource], { nullable: false }) - parkedParentSource: FlowParkedParentSource[]; + @Field(() => FlowParkedParentSource, { nullable: true }) + parkedParentSource: FlowParkedParentSource | null; } @ObjectType() From d17595ca437ae95a4826086e131e02bec6b7a23d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Fri, 22 Dec 2023 12:41:00 +0100 Subject: [PATCH 48/67] Add all flow properties --- .../flows/flow-search-service.ts | 22 +++++++++- src/domain-services/flows/graphql/types.ts | 42 ++++++++++++++++--- 2 files changed, 56 insertions(+), 8 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index b8e0e7f1..bb3d19f5 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -930,6 +930,25 @@ export class FlowSearchService { updatedAt: flow.updatedAt.toISOString(), activeStatus: flow.activeStatus, restricted: flow.restricted, + flowDate: flow.flowDate ? flow.flowDate.toISOString() : null, + decisionDate: flow.decisionDate ? flow.decisionDate.toISOString() : null, + firstReportedDate: flow.firstReportedDate + ? flow.firstReportedDate.toISOString() + : null, + budgetYear: flow.budgetYear, + exchangeRate: flow.exchangeRate ? flow.exchangeRate.toString() : null, + origAmount: flow.origAmount ? flow.origAmount.toString() : null, + origCurrency: flow.origCurrency ? flow.origCurrency.toString() : null, + description: flow.description, + notes: flow.notes, + versionStartDate: flow.versionStartDate + ? 
flow.versionStartDate.toISOString() + : null, + versionEndDate: flow.versionEndDate + ? flow.versionEndDate.toISOString() + : null, + newMoney: flow.newMoney, + // Optional fields categories, organizations, @@ -938,8 +957,7 @@ export class FlowSearchService { usageYears, childIDs, parentIDs, - origAmount: flow.origAmount ? flow.origAmount.toString() : null, - origCurrency: flow.origCurrency ? flow.origCurrency.toString() : null, + externalReferences, reportDetails, parkedParentSource, diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 447b84db..0a5d6ddd 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -57,6 +57,42 @@ export class BaseFlow extends BaseType { @Field({ nullable: false }) restricted: boolean; + + @Field(() => String, { nullable: true }) + flowDate: string | null; + + @Field(() => String, { nullable: true }) + decisionDate: string | null; + + @Field(() => String, { nullable: true }) + firstReportedDate: string | null; + + @Field(() => String, { nullable: true }) + budgetYear: string | null; + + @Field(() => String, { nullable: true }) + exchangeRate: string | null; + + @Field(() => String, { nullable: true }) + origAmount: string | null; + + @Field(() => String, { nullable: true }) + origCurrency: string | null; + + @Field(() => String, { nullable: true }) + description: string | null; + + @Field(() => String, { nullable: true }) + notes: string | null; + + @Field(() => String, { nullable: true }) + versionStartDate: string | null; + + @Field(() => String, { nullable: true }) + versionEndDate: string | null; + + @Field(() => Boolean, { nullable: true }) + newMoney: boolean | null; } @ObjectType() @@ -111,12 +147,6 @@ export class Flow extends BaseFlow { @Field(() => [Number], { nullable: false }) parentIDs: number[]; - @Field(() => String, { nullable: true }) - origAmount: string | null; - - @Field(() => String, { nullable: true }) - origCurrency: string | null; - @Field(() => [FlowExternalReference], { nullable: false }) externalReferences: FlowExternalReference[]; From d8130086bb36d4adcab6118caae18f27c2ba4bc8 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 26 Dec 2023 07:16:27 +0100 Subject: [PATCH 49/67] Temp: add ref to hpc-api-core --- package.json | 2 +- yarn.lock | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 575d43cc..54b9f0b7 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "lint": "yarn lint-prettier && yarn lint-eslint" }, "dependencies": { - "@unocha/hpc-api-core": "../hpc-api-core", + "@unocha/hpc-api-core": "github:UN-OCHA/hpc-api-core#3a3030ee83ad77e5fd7c40238d5ecabe1e6c7da9", "apollo-server-hapi": "^3.12.0", "bunyan": "^1.8.15", "class-validator": "^0.14.0", diff --git a/yarn.lock b/yarn.lock index 581d2d66..24ba2e20 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1624,8 +1624,9 @@ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@unocha/hpc-api-core@../hpc-api-core": +"@unocha/hpc-api-core@github:UN-OCHA/hpc-api-core#3a3030ee83ad77e5fd7c40238d5ecabe1e6c7da9": version "7.0.0" + resolved "https://codeload.github.com/UN-OCHA/hpc-api-core/tar.gz/3a3030ee83ad77e5fd7c40238d5ecabe1e6c7da9" dependencies: "@types/lodash" "^4.14.194" "@types/node-fetch" "2.6.3" From 
ff8d11f50b0c5215dea6ac164930c36c268e38c0 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 26 Dec 2023 13:38:14 +0100 Subject: [PATCH 50/67] Temp: merge with parkedParentSource commit --- .../flows/flow-search-service.ts | 17 ++++++++++++++--- src/domain-services/flows/graphql/types.ts | 3 +++ 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index bb3d19f5..d78a5f42 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -366,8 +366,14 @@ export class FlowSearchService { databaseConnection: Knex, filters: SearchFlowsArgs ): Promise { - const { limit, nextPageCursor, prevPageCursor, sortField, sortOrder } = - filters; + const { + limit, + nextPageCursor, + prevPageCursor, + sortField, + sortOrder, + includeChildrenOfParkedFlows: shouldIncludeChildrenOfParkedFlows, + } = filters; const orderBy: FlowOrderBy = this.buildOrderBy(sortField, sortOrder); @@ -499,7 +505,8 @@ export class FlowSearchService { let parkedParentSource: FlowParkedParentSource | null = null; const shouldLookAfterParentSource = - flow.activeStatus && flowLink.length > 0; + flowLink.length > 0 && shouldIncludeChildrenOfParkedFlows; + if (shouldLookAfterParentSource) { parkedParentSource = await this.getParketParents( flow, @@ -848,6 +855,7 @@ export class FlowSearchService { const mappedParkedParentOrganizations: FlowParkedParentSource = { organization: [], orgName: [], + abbreviation: [], }; for (const parkedParentOrganization of parkedParentOrganizations) { @@ -857,6 +865,9 @@ export class FlowSearchService { mappedParkedParentOrganizations.orgName.push( parkedParentOrganization.name ); + mappedParkedParentOrganizations.abbreviation.push( + parkedParentOrganization.abbreviation ?? 
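+ // Rough shape of the result with made-up values: two parked parents
+ // (id 45 'Example Donor' / 'ED', id 78 'Sample Fund' / no abbreviation) become
+ // { organization: [45, 78], orgName: ['Example Donor', 'Sample Fund'], abbreviation: ['ED', ''] },
+ // i.e. three parallel arrays indexed by parent organization.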
'' + ); } return mappedParkedParentOrganizations; diff --git a/src/domain-services/flows/graphql/types.ts b/src/domain-services/flows/graphql/types.ts index 0a5d6ddd..075bce70 100644 --- a/src/domain-services/flows/graphql/types.ts +++ b/src/domain-services/flows/graphql/types.ts @@ -39,6 +39,9 @@ export class FlowParkedParentSource { @Field(() => [String], { nullable: false }) orgName: string[]; + + @Field(() => [String], { nullable: false }) + abbreviation: string[]; } @ObjectType() From b8a22068246e5eea1987dde853427c0dd081547d Mon Sep 17 00:00:00 2001 From: manelcecs Date: Wed, 27 Dec 2023 15:11:00 +0100 Subject: [PATCH 51/67] Fix searchTotalAmountFlow: only activeStatus true Minor type fixing --- .../flows/flow-search-service.ts | 26 +++++++++----- src/domain-services/flows/graphql/args.ts | 36 ++++++++++--------- .../search-flow-by-filters-strategy-impl.ts | 7 ++-- 3 files changed, 42 insertions(+), 27 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index d78a5f42..0e4c1464 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -21,11 +21,11 @@ import { ReportDetailService } from '../report-details/report-detail-service'; import { type UsageYear } from '../usage-years/grpahql/types'; import { UsageYearService } from '../usage-years/usage-year-service'; import { + SearchFlowsFilters, type FlowCategory, type FlowObjectFilters, type SearchFlowsArgs, type SearchFlowsArgsNonPaginated, - type SearchFlowsFilters, } from './graphql/args'; import { type Flow, @@ -612,7 +612,7 @@ export class FlowSearchService { !isFilterByPendingFlowsDefined; if (isOrderByEntityFlow && (isNoFilterDefined || isFlowFiltersOnly)) { - // use onlyFlowFiltersStrategy + // Use onlyFlowFiltersStrategy return this.onlyFlowFiltersStrategy; } @@ -825,7 +825,7 @@ export class FlowSearchService { } } - const parkedParentFlowObjectsOrganizationSource = []; + const parkedParentFlowObjectsOrganizationSource: FlowObject[] = []; for (const parentFlow of parentFlows) { const parkedParentOrganizationFlowObject = @@ -837,16 +837,19 @@ export class FlowSearchService { versionID: flow.versionID, }, }); - parkedParentFlowObjectsOrganizationSource.push( - parkedParentOrganizationFlowObject - ); + + if (parkedParentOrganizationFlowObject) { + parkedParentFlowObjectsOrganizationSource.push( + parkedParentOrganizationFlowObject + ); + } } const parkedParentOrganizations = await models.organization.find({ where: { id: { [Op.IN]: parkedParentFlowObjectsOrganizationSource.map((flowObject) => - createBrandedValue(flowObject?.objectID) + createBrandedValue((flowObject as FlowObject)?.objectID) ), }, }, @@ -1012,13 +1015,20 @@ export class FlowSearchService { models: Database, args: SearchFlowsArgsNonPaginated ): Promise { + let { flowFilters } = args; const { - flowFilters, flowObjectFilters, flowCategoryFilters, pending: isPendingFlows, } = args; + if (!flowFilters) { + flowFilters = new SearchFlowsFilters(); + flowFilters.activeStatus = true; + } else if (!flowFilters.activeStatus) { + flowFilters.activeStatus = true; + } + const { strategy, conditions } = this.determineStrategy( flowFilters, flowObjectFilters, diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 05f5f8ed..73cd7ebb 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -5,31 +5,33 @@ import { type FlowSortField } from './types'; 
@InputType() export class SearchFlowsFilters { @Field(() => [Number], { nullable: true }) - id: number[]; + id: number[] | null; - @Field({ nullable: true }) - activeStatus: boolean; + @Field(() => Boolean, { nullable: true }) + activeStatus: boolean | null; - @Field({ nullable: true }) - status: 'commitment' | 'paid' | 'pledged'; + @Field(() => String, { nullable: true }) + status: 'commitment' | 'paid' | 'pledged' | null; - @Field({ nullable: true }) - type: 'carryover' | 'parked' | 'pass_through' | 'standard'; + @Field(() => String, { nullable: true }) + type: 'carryover' | 'parked' | 'pass_through' | 'standard' | null; - @Field({ nullable: true }) - amountUSD: number; + @Field(() => Number, { nullable: true }) + amountUSD: number | null; - @Field({ name: 'reporterRefCode', nullable: true }) - reporterReferenceCode: number; + @Field(() => Number, { name: 'reporterRefCode', nullable: true }) + reporterReferenceCode: number | null; - @Field({ name: 'sourceSystemID', nullable: true }) - sourceSystemId: number; + @Field(() => Number, { name: 'sourceSystemID', nullable: true }) + sourceSystemId: number | null; - @Field({ name: 'legacyID', nullable: true }) - legacyId: number; + @Field(() => Number, { name: 'legacyID', nullable: true }) + legacyId: number | null; - @Field({ nullable: true }) - restricted: boolean; + @Field(() => Boolean, { nullable: true }) + restricted: boolean | null; + + constructor() {} } @InputType() diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index 1a2aa44b..8fdf84b1 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -279,8 +279,11 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { }; } - let rawOrderBy = undefined; - let orderByFlow = undefined; + let rawOrderBy: string = ''; + let orderByFlow: { + column: any; + order: any; + } = { column: 'updatedAt', order: 'DESC' }; if (isSortByEntity) { rawOrderBy = `array_position(ARRAY[${sortByFlowIDs.join(',')}], "id")`; } else { From bca2a88655d9d572bf9a4256f122f131127ad6ac Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 28 Dec 2023 12:22:00 +0100 Subject: [PATCH 52/67] Allow sort by nested properties of collections Minor fix when assigning plans to flows --- .../flows/flow-search-service.ts | 27 +- src/domain-services/flows/flow-service.ts | 9 +- src/domain-services/flows/model.ts | 1 + .../search-flow-by-filters-strategy-impl.ts | 247 ++++++++---------- src/domain-services/plans/plan-service.ts | 46 ++-- 5 files changed, 163 insertions(+), 167 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 0e4c1464..dd837274 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -634,6 +634,11 @@ export class FlowSearchService { // column: 'organizations.source.name' // or in the format: // column: 'flow.updatedAt' + // or in the format: + // column: 'planVersion.source.name' + // in this last case, we need to look for the capitalized letter + // that will indicate the entity + // and the whole word will be the subEntity const struct = orderBy.column.split('.'); if (struct.length === 2) { @@ -642,12 +647,32 @@ export class FlowSearchService { } else if (struct.length === 3) { orderBy.column = struct[2];
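+ // Concretely (illustrative values): 'flow.updatedAt' maps to entity 'flow' and column 'updatedAt';
+ // 'organizations.source.name' maps to entity 'organizations', direction 'source', column 'name';
+ // 'planVersion.source.name' is intended to map to entity 'plan' (the text before the first
+ // capital letter), subEntity 'planVersion', direction 'source', column 'name'.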
orderBy.direction = struct[1] as FlowNestedDirection; - orderBy.entity = struct[0]; + + // We need to look after the capitalized letter + // that will indicate the entity + // and the whole word will be the subEntity + // Capitalized letter will never be the first letter + const entity = this.getSubstringUntilCapital(struct[0]); + orderBy.entity = entity; + + if (entity === struct[0]) { + orderBy.subEntity = struct[0]; + } } } return orderBy; } + + getSubstringUntilCapital(inputString: string): string { + for (let i = 0; i < inputString.length; i++) { + if (inputString[i] === inputString[i].toUpperCase()) { + return inputString.substring(0, i); + } + } + return inputString; // Return inputString if no capital letter is found + } + prepareFlowConditions(flowFilters: SearchFlowsFilters): any { let flowConditions = {}; diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index e1720408..4980b82b 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -35,21 +35,20 @@ export class FlowService { orderBy: FlowOrderBy, limit: number ): Promise { - const entity = orderBy.entity; + const entity = orderBy.subEntity ?? orderBy.entity; // Get the entity list const mappedOrderBy = mapFlowOrderBy(orderBy); const entityList = await dbConnection .queryBuilder() - .select('id') + .select(orderBy.subEntity ? `${orderBy.subEntity}Id` : 'id') .from(entity) .orderBy(mappedOrderBy.column, mappedOrderBy.order); const entityIDs = entityList.map((entity) => entity.id); - // Get the flowIDs from the entity list // using the flow-object relation - const entityCondKey = entity.toString() as unknown as FlowObjectType; + const entityCondKey = orderBy.entity as unknown as FlowObjectType; const query = dbConnection .queryBuilder() @@ -59,10 +58,10 @@ export class FlowService { .andWhere('objectType', entityCondKey) .andWhere('refDirection', orderBy.direction!) 
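+ // For illustration, with entityIDs [45, 12, 78] (made-up IDs) the raw ordering applied just
+ // below becomes array_position(ARRAY[45,12,78], "objectID"), so the flowObject rows follow the
+ // pre-sorted entity order, with .orderBy('flowID', ...) then acting as a tie-breaker.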
.orderByRaw(`array_position(ARRAY[${entityIDs.join(',')}], "objectID")`) + .orderBy('flowID', orderBy.order) .limit(limit); const flowIDs = await query; - return flowIDs.map((flowID) => flowID.flowID); } } diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index 450fe360..55fb4ead 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -7,6 +7,7 @@ export type FlowOrderBy = { column: string; order: SortOrder; entity: string; + subEntity?: string; direction: FlowNestedDirection | undefined; }; diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index 8fdf84b1..bf493a7d 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -3,7 +3,7 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import type Knex from 'knex'; import { Service } from 'typedi'; -import { type FlowService } from '../../flow-service'; +import { FlowService } from '../../flow-service'; import { type FlowCategory, type FlowObjectFilters, @@ -94,8 +94,8 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { flowIDsFromCategoryFilters.push(...flowIDsFromCategoryStrategy.flowIDs); } - // After that, we need to check if we need to filter by flowObjects - // if so, we need to obtain the flowIDs from the flowObjects + // After that, if we need to filter by flowObjects + // Obtain the flowIDs from the flowObjects const isFilterByFlowObjects = flowObjectFilters?.length > 0; const flowIDsFromObjectFilters: FlowId[] = []; @@ -110,118 +110,94 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { flowIDsFromObjectFilters.push(...flowIDsFromObjectStrategy.flowIDs); } - // Apply only filter conditions but not cursor conditions - let countConditions = {}; + // We need to have two conditions, one for the search and one for the count + // 'countConditions' => Apply only filter conditions but not cursor conditions + // 'searchConditions' => Apply both filter and cursor conditions + const { countConditions, searchConditions } = this.buildConditions( + { + isFilterByFlowObjects, + isFilterByCategory, + willSearchPendingFlows: searchPendingFlows, + isSearchByPendingDefined, + }, + { + flowIDsFromCategoryFilters, + flowIDsFromObjectFilters, + flowFilters, + } + ); + + let rawOrderBy: string = ''; + let orderByFlow: + | { + column: any; + order: any; + } + | undefined = { column: 'updatedAt', order: 'DESC' }; + if (isSortByEntity) { + rawOrderBy = `array_position(ARRAY[${sortByFlowIDs.join(',')}], "id")`; + orderByFlow = undefined; + } else { + orderByFlow = mapFlowOrderBy(orderBy); + } + + const [flows, countRes] = await Promise.all([ + this.flowService.getFlows( + models, + searchConditions, + orderByFlow, + limit, + rawOrderBy + ), + this.flowService.getFlowsCount(models, countConditions), + ]); + + // Map count result query to count object + const countObject = mapCountResultToCountObject(countRes); - // Combine cursor condition with flow conditions - let searchConditions = { ...cursorCondition }; + return { flows, count: countObject.count }; + } + buildConditions( + decisionArgs: { + isFilterByFlowObjects: boolean; + isFilterByCategory: boolean; + willSearchPendingFlows: boolean | undefined; + 
isSearchByPendingDefined: boolean; + }, + filterArgs: { + flowIDsFromCategoryFilters: FlowId[]; + flowIDsFromObjectFilters: FlowId[]; + flowFilters: any | undefined; + } + ): { countConditions: any; searchConditions: any } { + const { + isFilterByFlowObjects, + isFilterByCategory, + willSearchPendingFlows, + isSearchByPendingDefined, + } = decisionArgs; + const { + flowIDsFromCategoryFilters, + flowIDsFromObjectFilters, + flowFilters, + } = filterArgs; + let countConditions: any = {}; + let searchConditions: any = {}; // Check if we have flowIDs from flowObjects and flowCategoryFilters // if so, we need to filter by those flowIDs - if (isFilterByFlowObjects && isFilterByCategory) { - if (searchPendingFlows === true) { - // We need to combine the flowIDs from flowObjects and flowCategoryFilters - // to have the least amount of flowIDs to filter - const setOfFlowIDs = new Set([ - ...flowIDsFromCategoryFilters, - ...flowIDsFromObjectFilters, - ]); - const flowIDsFromFilteredFlowObjects = [...setOfFlowIDs]; - searchConditions = { - ...searchConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: flowIDsFromFilteredFlowObjects, - }, - }, - ], - }; - countConditions = { - ...countConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: flowIDsFromFilteredFlowObjects, - }, - }, - ], - }; - } else { - searchConditions = { - ...searchConditions, - [Cond.AND]: [ - { - id: { - [Op.NOT_IN]: flowIDsFromCategoryFilters, - [Op.IN]: flowIDsFromObjectFilters, - }, - }, - ], - }; - countConditions = { - ...countConditions, - [Cond.AND]: [ - { - id: { - [Op.NOT_IN]: flowIDsFromCategoryFilters, - [Op.IN]: flowIDsFromObjectFilters, - }, - }, - ], - }; - } - } else if (isFilterByCategory && isSearchByPendingDefined) { - if (searchPendingFlows === true) { - searchConditions = { - ...searchConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: flowIDsFromCategoryFilters, - }, - }, - ], - }; - countConditions = { - ...countConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: flowIDsFromCategoryFilters, - }, - }, - ], - }; - } else { - searchConditions = { - ...searchConditions, - [Cond.AND]: [ - { - id: { - [Op.NOT_IN]: flowIDsFromCategoryFilters, - }, - }, - ], - }; - countConditions = { - ...countConditions, - [Cond.AND]: [ - { - id: { - [Op.NOT_IN]: flowIDsFromCategoryFilters, - }, - }, - ], - }; - } - } else if (isFilterByCategory && !isSearchByPendingDefined) { + if ( + (isFilterByFlowObjects || isFilterByCategory) && + isSearchByPendingDefined + ) { + const deduplicatedFlowIDs = [...new Set(flowIDsFromCategoryFilters)]; + searchConditions = { ...searchConditions, [Cond.AND]: [ { id: { - [Op.IN]: flowIDsFromCategoryFilters, + [Op.IN]: deduplicatedFlowIDs, }, }, ], @@ -231,7 +207,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { [Cond.AND]: [ { id: { - [Op.IN]: flowIDsFromCategoryFilters, + [Op.IN]: deduplicatedFlowIDs, }, }, ], @@ -257,6 +233,34 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { }, ], }; + } else if (isFilterByCategory || isSearchByPendingDefined) { + const idCondition = isSearchByPendingDefined + ? willSearchPendingFlows + ? 
Op.IN + : Op.NOT_IN + : Op.IN; + + searchConditions = { + ...searchConditions, + [Cond.AND]: [ + { + id: { + [idCondition]: flowIDsFromCategoryFilters, + }, + }, + ], + }; + + countConditions = { + ...countConditions, + [Cond.AND]: [ + { + id: { + idCondition: flowIDsFromCategoryFilters, + }, + }, + ], + }; } // After adding the where clauses form the filters @@ -278,33 +282,6 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { [Cond.AND]: [flowConditions ?? {}], }; } - - let rawOrderBy: string = ''; - let orderByFlow: { - column: any; - order: any; - } = { column: 'updatedAt', order: 'DESC' }; - if (isSortByEntity) { - rawOrderBy = `array_position(ARRAY[${sortByFlowIDs.join(',')}], "id")`; - } else { - orderByFlow = mapFlowOrderBy(orderBy); - } - - // Temporal - const [flows, countRes] = await Promise.all([ - this.flowService.getFlows( - models, - searchConditions, - orderByFlow, - limit, - rawOrderBy - ), - this.flowService.getFlowsCount(models, countConditions), - ]); - - // Map count result query to count object - const countObject = mapCountResultToCountObject(countRes); - - return { flows, count: countObject.count }; + return { countConditions, searchConditions }; } } diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index 2f162110..2920b8c9 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -74,32 +74,26 @@ export class PlanService { }, }); - const planFlowObject = plansFO.find( - (planFO) => planFO.objectID === plan.id - ); - - if (planVersion.length && planFlowObject) { - const flowId = planFlowObject && planFlowObject.flowID; - - if (!plansMap.has(flowId)) { - plansMap.set(flowId, []); - } - - const plansPerFlow = plansMap.get(flowId)!; - - if ( - !plansPerFlow.some( - (plan) => - plan.id === plan.id && - plan.direction === planFlowObject.refDirection - ) - ) { - const planMapped = this.mapPlansToFlowPlans( - plan, - planVersion[0], - planFlowObject?.refDirection ?? null - ); - plansPerFlow.push(planMapped); + for (const planFO of plansFO) { + if (planFO.objectID === plan.id) { + const flowId = planFO.flowID; + if (!plansMap.has(flowId)) { + plansMap.set(flowId, []); + } + const plansPerFlow = plansMap.get(flowId)!; + if ( + !plansPerFlow.some( + (plan) => + plan.id === plan.id && plan.direction === planFO.refDirection + ) + ) { + const planMapped = this.mapPlansToFlowPlans( + plan, + planVersion[0], + planFO?.refDirection ?? 
null + ); + plansPerFlow.push(planMapped); + } } } } From 332accd874b894c282515a23dc12fa96df44a83b Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 28 Dec 2023 14:45:00 +0100 Subject: [PATCH 53/67] Definitive usage of search V2 method Removed old search and rename 'searchV2' to 'search' --- .../flows/flow-search-service.ts | 411 ++---------------- src/domain-services/flows/flow-service.ts | 9 +- src/domain-services/flows/graphql/args.ts | 38 +- src/domain-services/flows/graphql/resolver.ts | 14 +- .../flows/strategy/flow-search-strategy.ts | 35 +- .../flow-object-conditions-strategy-impl.ts | 158 ------- .../only-flow-conditions-strategy-impl.ts | 48 +- .../search-flow-by-filters-strategy-impl.ts | 47 +- 8 files changed, 112 insertions(+), 648 deletions(-) delete mode 100644 src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index dd837274..b3d1bbfa 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -41,7 +41,6 @@ import { type FlowOrderBy, } from './model'; import { type FlowSearchStrategy } from './strategy/flow-search-strategy'; -import { FlowObjectFiltersStrategy } from './strategy/impl/flow-object-conditions-strategy-impl'; import { OnlyFlowFiltersStrategy } from './strategy/impl/only-flow-conditions-strategy-impl'; import { SearchFlowByFiltersStrategy } from './strategy/impl/search-flow-by-filters-strategy-impl'; @@ -49,7 +48,6 @@ import { SearchFlowByFiltersStrategy } from './strategy/impl/search-flow-by-filt export class FlowSearchService { constructor( private readonly onlyFlowFiltersStrategy: OnlyFlowFiltersStrategy, - private readonly flowObjectFiltersStrategy: FlowObjectFiltersStrategy, private readonly searchFlowByFiltersStrategy: SearchFlowByFiltersStrategy, private readonly organizationService: OrganizationService, private readonly locationService: LocationService, @@ -63,305 +61,6 @@ export class FlowSearchService { ) {} async search( - models: Database, - filters: SearchFlowsArgs - ): Promise { - const { - limit, - nextPageCursor, - prevPageCursor, - sortField, - sortOrder, - pending: isPendingFlows, - } = filters; - - const orderBy: FlowOrderBy = this.buildOrderBy(sortField, sortOrder); - - const { flowFilters, flowObjectFilters, flowCategoryFilters } = filters; - - const cursorCondition = this.buildCursorCondition( - prevPageCursor, - nextPageCursor, - orderBy - ); - - // Determine strategy of how to search for flows - const { strategy, conditions } = this.determineStrategy( - flowFilters, - flowObjectFilters, - flowCategoryFilters, - isPendingFlows - ); - - // Fetch one more item to check for hasNextPage - const limitComputed = limit + 1; - - // Obtain flows and its count based on the strategy selected - const { flows, count } = await strategy.search( - conditions, - models, - orderBy, - limitComputed, - cursorCondition, - isPendingFlows - ); - - // Remove the extra item used to check hasNextPage - const hasNextPage = flows.length > limit; - if (hasNextPage) { - flows.pop(); - } - - const flowIds: FlowId[] = []; - const flowWithVersion: Map = new Map(); - - // Obtain flow IDs and flow version IDs - for (const flow of flows) { - flowIds.push(flow.id); - if (!flowWithVersion.has(flow.id)) { - flowWithVersion.set(flow.id, []); - } - const flowVersionIDs = flowWithVersion.get(flow.id)!; - flowVersionIDs.push(flow.versionID); - } - - // Obtain external references 
and flow objects in parallel - const [externalReferencesMap, flowObjects] = await Promise.all([ - this.externalReferenceService.getExternalReferencesForFlows( - flowIds, - models - ), - this.flowObjectService.getFlowObjectByFlowId(models, flowIds), - ]); - - // Map flow objects to their respective arrays - const organizationsFO: FlowObject[] = []; - const locationsFO: FlowObject[] = []; - const plansFO: FlowObject[] = []; - const usageYearsFO: FlowObject[] = []; - - this.groupByFlowObjectType( - flowObjects, - organizationsFO, - locationsFO, - plansFO, - usageYearsFO - ); - - // Obtain flow links - const flowLinksMap = await this.flowLinkService.getFlowLinksForFlows( - flowIds, - models - ); - - // Perform all nested queries in parallel - const [ - categoriesMap, - organizationsMap, - locationsMap, - plansMap, - usageYearsMap, - reportDetailsMap, - ] = await Promise.all([ - this.categoryService.getCategoriesForFlows(flowWithVersion, models), - this.organizationService.getOrganizationsForFlows( - organizationsFO, - models - ), - this.locationService.getLocationsForFlows(locationsFO, models), - this.planService.getPlansForFlows(plansFO, models), - this.usageYearService.getUsageYearsForFlows(usageYearsFO, models), - this.reportDetailService.getReportDetailsForFlows(flowIds, models), - ]); - - const items = await Promise.all( - flows.map(async (flow) => { - const flowLink = flowLinksMap.get(flow.id) ?? []; - - // Categories Map follows the structure: - // flowID: { versionID: [categories]} - // So we need to get the categories for the flow version - const categories = - categoriesMap.get(flow.id)!.get(flow.versionID) ?? []; - const organizations = organizationsMap.get(flow.id) ?? []; - const locations = locationsMap.get(flow.id) ?? []; - const plans = plansMap.get(flow.id) ?? []; - const usageYears = usageYearsMap.get(flow.id) ?? []; - const externalReferences = externalReferencesMap.get(flow.id) ?? []; - const reportDetails = reportDetailsMap.get(flow.id) ?? []; - - const reportDetailsWithChannel = - this.reportDetailService.addChannelToReportDetails( - reportDetails, - categories - ); - - let parkedParentSource: FlowParkedParentSource | null = null; - if (flow.activeStatus && flowLink.length > 0) { - parkedParentSource = await this.getParketParents( - flow, - flowLink, - models - ); - } - - const childIDs: number[] = - (flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.parentID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.childID.valueOf()) as number[]) ?? []; - - const parentIDs: number[] = - (flowLinksMap - .get(flow.id) - ?.filter( - (flowLink) => flowLink.childID === flow.id && flowLink.depth > 0 - ) - .map((flowLink) => flowLink.parentID.valueOf()) as number[]) ?? 
[]; - - return this.buildFlowDTO( - flow, - categories, - organizations, - locations, - plans, - usageYears, - childIDs, - parentIDs, - externalReferences, - reportDetailsWithChannel, - parkedParentSource - ); - }) - ); - - // Sort items - // FIXME: this sorts the page, not the whole result set - items.sort((a: Flow, b: Flow) => { - const entityKey = orderBy.entity as keyof Flow; - - const nestedA = a[entityKey]; - const nestedB = b[entityKey]; - - if (nestedA && nestedB) { - if (orderBy.direction) { - // This means the orderBy came in the format: - // column: 'nestedEntity.direction.property' - // So we need to get the entry of the nested entity - // which its direction matches the orderBy direction - // and sort by the property using the orderBy order - - // Fisrt, check if the nestedEntity is trusy an Array - if (!Array.isArray(nestedA)) { - return 0; - } - if (!Array.isArray(nestedB)) { - return 0; - } - - // Now we ensure both properties are arrays - // we can assume that the nestedEntity is one of the following: - // organizations, locations, plans, usageYears - const directionEntityA = nestedA as unknown as - | Organization[] - | BaseLocation[] - | BasePlan[] - | UsageYear[]; - const directionEntityB = nestedB as unknown as - | Organization[] - | BaseLocation[] - | BasePlan[] - | UsageYear[]; - - // Then we find the entry of the nestedEntity that matches the orderBy direction - const nestedEntityA = directionEntityA.find( - (nestedEntity: any) => orderBy.direction === nestedEntity.direction - ); - const nestedEntityB = directionEntityB.find( - (nestedEntity: any) => orderBy.direction === nestedEntity.direction - ); - - // After, we need to check there is an entry that matches the orderBy direction - // if not, we return 0 - if (!nestedEntityA) { - return 0; - } - if (!nestedEntityB) { - return 0; - } - - // Now we can sort by the property using the orderBy order - const propertyA = - nestedEntityA[orderBy.column as keyof typeof nestedEntityA]; - const propertyB = - nestedEntityB[orderBy.column as keyof typeof nestedEntityB]; - - // Finally, we check that the property is defined - // and if so - we sort by the property using the orderBy order - if (propertyA && propertyB) { - if (orderBy.order === 'asc') { - return propertyA > propertyB ? 1 : -1; - } - return propertyA < propertyB ? 1 : -1; - } - } - // Since there is no direction expecified in the orderBy - // we can assume that the nestedEntity is one of the following: - // childIDs, parentIDs, externalReferences, reportDetails, parkedParentSource, categories - // and we can sort by the property using the orderBy order - const propertyA = nestedA[orderBy.column as keyof typeof nestedA]; - const propertyB = nestedB[orderBy.column as keyof typeof nestedB]; - - // Finally, we check that the property is defined - // and if so - we sort by the property using the orderBy order - if (propertyA && propertyB) { - if (orderBy.order === 'asc') { - return propertyA > propertyB ? 1 : -1; - } - return propertyA < propertyB ? 1 : -1; - } - } - - return 0; - }); - - const isOrderByForFlows = orderBy.entity === 'flow'; - const firstItem = items[0]; - const prevPageCursorEntity = isOrderByForFlows - ? firstItem - : firstItem[orderBy.entity as keyof typeof firstItem]; - const prevPageCursorValue = prevPageCursorEntity - ? prevPageCursorEntity[ - orderBy.column as keyof typeof prevPageCursorEntity - ] ?? '' - : ''; - - const lastItem = items.at(-1); - const nextPageCursorEntity = isOrderByForFlows - ? 
lastItem - : lastItem![orderBy.entity as keyof typeof lastItem]; - const nextPageCursorValue = nextPageCursorEntity - ? nextPageCursorEntity[ - orderBy.column as keyof typeof nextPageCursorEntity - ]?.toString() ?? '' - : ''; - - return { - flows: items, - hasNextPage: limit <= flows.length, - hasPreviousPage: nextPageCursor !== undefined, - prevPageCursor: prevPageCursorValue, - nextPageCursor: nextPageCursorValue, - pageSize: flows.length, - sortField: `${orderBy.entity}.${orderBy.column}` as FlowSortField, - sortOrder: sortOrder ?? 'desc', - total: count, - }; - } - - async searchV2( models: Database, databaseConnection: Knex, filters: SearchFlowsArgs @@ -372,7 +71,7 @@ export class FlowSearchService { prevPageCursor, sortField, sortOrder, - includeChildrenOfParkedFlows: shouldIncludeChildrenOfParkedFlows, + shouldIncludeChildrenOfParkedFlows, } = filters; const orderBy: FlowOrderBy = this.buildOrderBy(sortField, sortOrder); @@ -386,7 +85,7 @@ export class FlowSearchService { // Once we've gathered all the filters, we need to determine the strategy // to use in order to obtain the flowIDs - const strategy: FlowSearchStrategy = this.determineStrategyV2( + const strategy: FlowSearchStrategy = this.determineStrategy( flowFilters, flowObjectFilters, flowCategoryFilters, @@ -401,7 +100,7 @@ export class FlowSearchService { orderBy ); - const { flows, count } = await strategy.searchV2( + const { flows, count } = await strategy.search({ models, databaseConnection, limit, @@ -410,8 +109,8 @@ export class FlowSearchService { flowFilters, flowObjectFilters, flowCategoryFilters, - isPendingFlows - ); + searchPendingFlows: isPendingFlows, + }); // Remove the extra item used to check hasNextPage const hasNextPage = flows.length > limit; @@ -581,12 +280,12 @@ export class FlowSearchService { }; } - determineStrategyV2( + determineStrategy( flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], flowCategoryFilters: FlowCategory[], isPendingFlows: boolean, - orderBy: FlowOrderBy + orderBy?: FlowOrderBy ) { // If there are no filters (flowFilters, flowObjectFilters, flowCategoryFilters or pending) // and there is no sortByEntity (orderBy.entity === 'flow') @@ -594,7 +293,7 @@ export class FlowSearchService { // If there are no sortByEntity (orderBy.entity === 'flow') // but flowFilters only // use onlyFlowFiltersStrategy - const isOrderByEntityFlow = orderBy.entity === 'flow'; + const isOrderByEntityFlow = orderBy?.entity === 'flow'; const isFlowFiltersDefined = flowFilters !== undefined; const isFlowObjectFiltersDefined = flowObjectFilters !== undefined; const isFlowCategoryFiltersDefined = flowCategoryFilters !== undefined; @@ -725,68 +424,6 @@ export class FlowSearchService { return flowObjectsConditions; } - determineStrategy( - flowFilters: SearchFlowsFilters, - flowObjectFilters: FlowObjectFilters[], - flowCategoryFilters: FlowCategory[], - isFilterByPendingFlows: boolean - ): { strategy: FlowSearchStrategy; conditions: any } { - const isFlowFilterDefined = flowFilters !== undefined; - const isFlowObjectFilterDefined = flowObjectFilters !== undefined; - const isFlowObjectFiltersNotEmpty = - isFlowObjectFilterDefined && flowObjectFilters.length !== 0; - - const isFlowCategoryFilterDefined = flowCategoryFilters !== undefined; - const isFlowCategoryFilterNotEmpty = - isFlowCategoryFilterDefined && flowCategoryFilters.length !== 0; - - const isFilterByPendingFlowsDefined = isFilterByPendingFlows !== undefined; - if ( - (!isFlowFilterDefined && - (!isFlowObjectFilterDefined || 
!isFlowObjectFiltersNotEmpty) && - !isFlowCategoryFilterNotEmpty && - !isFilterByPendingFlowsDefined) || - (isFlowFilterDefined && - (!isFlowObjectFilterDefined || !isFlowObjectFiltersNotEmpty) && - !isFlowCategoryFilterNotEmpty && - !isFilterByPendingFlowsDefined) - ) { - const flowConditions = this.prepareFlowConditions(flowFilters); - return { - strategy: this.onlyFlowFiltersStrategy, - conditions: flowConditions, - }; - } else if ( - isFlowObjectFiltersNotEmpty || - isFlowCategoryFilterNotEmpty || - isFilterByPendingFlowsDefined - ) { - const flowConditions = this.prepareFlowConditions(flowFilters); - const flowObjectConditions = - this.prepareFlowObjectConditions(flowObjectFilters); - - return { - strategy: this.flowObjectFiltersStrategy, - conditions: { - conditionsMap: this.buildConditionsMap( - flowConditions, - flowObjectConditions - ), - flowCategoryFilters, - }, - }; - } - - throw new Error('Invalid combination of flowFilters and flowObjectFilters'); - } - - private buildConditionsMap(flowConditions: any, flowObjectConditions: any) { - const conditionsMap = new Map(); - conditionsMap.set('flowObjects', flowObjectConditions); - conditionsMap.set('flow', flowConditions); - return conditionsMap; - } - private groupByFlowObjectType( flowObjects: FlowObject[], organizationsFO: FlowObject[], @@ -1038,6 +675,7 @@ export class FlowSearchService { async searchTotalAmount( models: Database, + databaseConnection: Knex, args: SearchFlowsArgsNonPaginated ): Promise { let { flowFilters } = args; @@ -1054,21 +692,23 @@ export class FlowSearchService { flowFilters.activeStatus = true; } - const { strategy, conditions } = this.determineStrategy( + // Once we've gathered all the filters, we need to determine the strategy + // to use in order to obtain the flowIDs + const strategy: FlowSearchStrategy = this.determineStrategy( flowFilters, flowObjectFilters, flowCategoryFilters, isPendingFlows ); - const { flows, count } = await strategy.search( - conditions, + const { flows, count } = await strategy.search({ models, - undefined, - undefined, - undefined, - isPendingFlows - ); + databaseConnection, + flowFilters, + flowObjectFilters, + flowCategoryFilters, + searchPendingFlows: isPendingFlows, + }); const flowsAmountUSD: Array = flows.map( (flow) => flow.amountUSD @@ -1087,9 +727,14 @@ export class FlowSearchService { async searchBatches( models: Database, + databaseConnection: Knex, args: SearchFlowsArgs ): Promise { - const flowSearchResponse = await this.search(models, args); + const flowSearchResponse = await this.search( + models, + databaseConnection, + args + ); const flows: Flow[] = flowSearchResponse.flows; @@ -1100,7 +745,11 @@ export class FlowSearchService { let nextFlowSearchResponse: FlowSearchResult; while (hasNextPage) { - nextFlowSearchResponse = await this.search(models, nextArgs); + nextFlowSearchResponse = await this.search( + models, + databaseConnection, + nextArgs + ); flows.push(...nextFlowSearchResponse.flows); diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 4980b82b..e08c44c7 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -33,7 +33,7 @@ export class FlowService { models: Database, dbConnection: Knex, orderBy: FlowOrderBy, - limit: number + limit?: number ): Promise { const entity = orderBy.subEntity ?? orderBy.entity; @@ -58,8 +58,11 @@ export class FlowService { .andWhere('objectType', entityCondKey) .andWhere('refDirection', orderBy.direction!) 
.orderByRaw(`array_position(ARRAY[${entityIDs.join(',')}], "objectID")`) - .orderBy('flowID', orderBy.order) - .limit(limit); + .orderBy('flowID', orderBy.order); + + if (limit) { + query.limit(limit); + } const flowIDs = await query; return flowIDs.map((flowID) => flowID.flowID); diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 73cd7ebb..9ab22ce2 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -2,6 +2,36 @@ import { ArgsType, Field, InputType } from 'type-graphql'; import { PaginationArgs } from '../../../utils/graphql/pagination'; import { type FlowSortField } from './types'; +@InputType() +export class FlowStatusFilters { + @Field(() => Boolean, { nullable: true }) + pending: boolean | null; + + @Field(() => Boolean, { nullable: true }) + commitment: boolean | null; + + @Field(() => Boolean, { nullable: true }) + paid: boolean | null; + + @Field(() => Boolean, { nullable: true }) + pledged: boolean | null; +} + +@InputType() +export class FlowTypeFilters { + @Field(() => Boolean, { nullable: true }) + carryover: boolean | null; + + @Field(() => Boolean, { nullable: true }) + parked: boolean | null; + + @Field(() => Boolean, { nullable: true }) + pass_through: boolean | null; + + @Field(() => Boolean, { nullable: true }) + standard: boolean | null; +} + @InputType() export class SearchFlowsFilters { @Field(() => [Number], { nullable: true }) @@ -80,8 +110,8 @@ export class SearchFlowsArgs extends PaginationArgs { @Field(() => [FlowObjectFilters], { nullable: true }) flowObjectFilters: FlowObjectFilters[]; - @Field({ nullable: true }) - includeChildrenOfParkedFlows: boolean; + @Field({ name: 'includeChildrenOfParkedFlows', nullable: true }) + shouldIncludeChildrenOfParkedFlows: boolean; @Field(() => [FlowCategory], { nullable: true }) flowCategoryFilters: FlowCategory[]; @@ -98,8 +128,8 @@ export class SearchFlowsArgsNonPaginated { @Field(() => [FlowObjectFilters], { nullable: true }) flowObjectFilters: FlowObjectFilters[]; - @Field({ nullable: true }) - includeChildrenOfParkedFlows: boolean; + @Field({ name: 'includeChildrenOfParkedFlows', nullable: true }) + shouldIncludeChildrenOfParkedFlows: boolean; @Field(() => [FlowCategory], { nullable: true }) flowCategoryFilters: FlowCategory[]; diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index a44538e7..78c4d9d5 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -21,7 +21,7 @@ export default class FlowResolver { @Args(() => SearchFlowsArgs, { validate: false }) args: SearchFlowsArgs ): Promise { - return await this.flowSearchService.searchV2( + return await this.flowSearchService.search( context.models, context.connection, args @@ -34,7 +34,11 @@ export default class FlowResolver { @Args(() => SearchFlowsArgsNonPaginated, { validate: false }) args: SearchFlowsArgsNonPaginated ): Promise { - return await this.flowSearchService.searchTotalAmount(context.models, args); + return await this.flowSearchService.searchTotalAmount( + context.models, + context.connection, + args + ); } @Query(() => FlowSearchResultNonPaginated) @@ -45,6 +49,10 @@ export default class FlowResolver { ): Promise { // Set default batch size to 1000 args.limit = args.limit > 0 ? 
args.limit : 1000; - return await this.flowSearchService.searchBatches(context.models, args); + return await this.flowSearchService.searchBatches( + context.models, + context.connection, + args + ); } } diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index 9dfa94d3..4264cf70 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -12,27 +12,18 @@ export interface FlowSearchStrategyResponse { count: number; } -export interface FlowSearchStrategy { - search( - flowConditions: - | Map - | { conditionsMap: Map; flowCategoryFilters: any }, - models: Database, - orderBy?: any, - limit?: number, - cursorCondition?: any, - filterByPendingFlows?: boolean - ): Promise; +export interface FlowSearchArgs { + models: Database; + databaseConnection: Knex; + flowFilters: SearchFlowsFilters; + flowObjectFilters: FlowObjectFilters[]; + flowCategoryFilters: FlowCategory[]; + limit?: number; + orderBy?: any; + cursorCondition?: any; + searchPendingFlows?: boolean; +} - searchV2( - models: Database, - databaseConnection: Knex, - limit: number, - orderBy: any, - cursorCondition: any | undefined, - flowFilters: SearchFlowsFilters, - flowObjectFilters: FlowObjectFilters[], - flowCategoryFilters: FlowCategory[], - searchPendingFlows: boolean | undefined - ): Promise; +export interface FlowSearchStrategy { + search(args: FlowSearchArgs): Promise; } diff --git a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts deleted file mode 100644 index 234a8ee9..00000000 --- a/src/domain-services/flows/strategy/impl/flow-object-conditions-strategy-impl.ts +++ /dev/null @@ -1,158 +0,0 @@ -import { type Database } from '@unocha/hpc-api-core/src/db'; -import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { Service } from 'typedi'; -import { FlowService } from '../../flow-service'; -import { type FlowCategory } from '../../graphql/args'; -import { - type FlowSearchStrategy, - type FlowSearchStrategyResponse, -} from '../flow-search-strategy'; -import { - type FlowIDSearchStrategy, - type FlowIdSearchStrategyResponse, -} from '../flowID-search-strategy'; -import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; -import { GetFlowIdsFromMixedConditionsStrategyImpl } from './get-flowIds-flow-mixed-conditions-strategy-impl'; -import { mapFlowOrderBy } from './utils'; - -@Service() -export class FlowObjectFiltersStrategy implements FlowSearchStrategy { - constructor( - private readonly flowService: FlowService, - private readonly getFlowIdsFromObjectConditions: GetFlowIdsFromMixedConditionsStrategyImpl, - private readonly getFlowIdsFromCategoryConditions: GetFlowIdsFromCategoryConditionsStrategyImpl, - private readonly getFlowIdsFromMixedConditions: GetFlowIdsFromMixedConditionsStrategyImpl - ) {} - - async search( - flowConditions: { - conditionsMap: Map; - flowCategoryFilters: FlowCategory[]; - }, - models: Database, - orderBy?: any, - limit?: number, - cursorCondition?: any, - filterByPendingFlows?: boolean - ): Promise { - const flowConditionsMap = flowConditions.conditionsMap; - // Obtain flowObjects conditions - const flowObjectsConditions: Map< - string, - Map - > = flowConditionsMap.get('flowObjects') ?? 
new Map(); - - // Obtain flow conditions - const flowEntityConditions = flowConditionsMap.get('flow') ?? new Map(); - - // Obtain flowCategory conditions - const flowCategoryConditions = flowConditions.flowCategoryFilters ?? []; - - const searchFlowIdsStrategy: FlowIDSearchStrategy = this.determineStrategy( - flowObjectsConditions, - flowCategoryConditions, - filterByPendingFlows - ); - - const { flowIDs: flowIdsToFilter }: FlowIdSearchStrategyResponse = - await searchFlowIdsStrategy.search( - models, - flowObjectsConditions, - flowCategoryConditions, - filterByPendingFlows - ); - - const whereClauseFromStrategy = searchFlowIdsStrategy.generateWhereClause( - flowIdsToFilter, - flowCategoryConditions, - filterByPendingFlows - ); - - // Combine conditions from flowObjects FlowIDs and flow conditions - const countConditions = { - [Cond.AND]: [flowEntityConditions ?? {}, whereClauseFromStrategy ?? {}], - }; - - // Combine cursor condition with flow conditions - const searchConditions = { - [Cond.AND]: [ - flowEntityConditions ?? {}, - cursorCondition ?? {}, - whereClauseFromStrategy ?? {}, - ], - }; - - // check and map orderBy to be from entity 'flow' - const orderByFlow = mapFlowOrderBy(orderBy); - - // Obtain flows and flowCount based on flowIDs from filtered flowObjects - // and flow conditions - const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, searchConditions, orderByFlow, limit), - this.flowService.getFlowsCount(models, countConditions), - ]); - - // Map count result query to count object - const countObject = countRes[0] as { count: number }; - - return { flows, count: countObject.count }; - } - - // Determine the strategy to use in order to obtain flowIDs - // aiming to have the least amount of flowIDs to filter - // in the next step - // If there are flowObjects conditions - // use flowObjects strategy - // otherwise use flowCategories strategy - // If there are both flowObjects and flowCategories conditions - // use both and merge the results keeping only flowIDs - // present in both arrays - // otherwise keep all flowIDs from the one that is not empty - determineStrategy( - flowObjectsConditions: Map>, - flowCategoryConditions: any, - filterByPendingFlows?: boolean - ): any { - const isFlowObjectsConditionsIsDefined = - flowObjectsConditions !== undefined; - const isFlowCategoryConditionsIsDefined = - flowCategoryConditions !== undefined; - const isFilterByPendingFlowsIsDefined = filterByPendingFlows !== undefined; - - const flowObjectsConditionsIsNotEmpty = - isFlowObjectsConditionsIsDefined && flowObjectsConditions.size; - const isFlowCategoryConditionsIsNotEmpty = - isFlowCategoryConditionsIsDefined && flowCategoryConditions.length !== 0; - - if ( - flowObjectsConditionsIsNotEmpty && - (isFlowCategoryConditionsIsNotEmpty || isFilterByPendingFlowsIsDefined) - ) { - return this.getFlowIdsFromMixedConditions; - } else if (flowObjectsConditionsIsNotEmpty) { - return this.getFlowIdsFromObjectConditions; - } else if ( - isFlowCategoryConditionsIsNotEmpty || - isFilterByPendingFlowsIsDefined - ) { - return this.getFlowIdsFromCategoryConditions; - } - throw new Error( - 'No strategy found for flowObjectsConditions and flowCategoryConditions' - ); - } - - searchV2( - _models: Database, - _databaseConnection: any, - _limit: number, - _orderBy: any, - _cursorCondition: any, - _flowFilters: any, - _flowObjectFilters: any, - _flowCategoryFilters: any, - _filterByPendingFlows?: boolean - ): Promise { - throw new Error('Method not implemented.'); - } -} diff 
--git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts index 6ca0d548..d77cbf8d 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts @@ -1,15 +1,8 @@ -import { type Database } from '@unocha/hpc-api-core/src/db'; import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; -import type Knex from 'knex'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; import { - type FlowCategory, - type FlowObjectFilters, - type SearchFlowsFilters, -} from '../../graphql/args'; -import { type FlowOrderBy } from '../../model'; -import { + type FlowSearchArgs, type FlowSearchStrategy, type FlowSearchStrategyResponse, } from '../flow-search-strategy'; @@ -23,43 +16,8 @@ import { export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { constructor(private readonly flowService: FlowService) {} - async search( - flowConditions: any, - models: Database, - orderBy?: any, - limit?: number, - cursorCondition?: any - ): Promise { - // Build conditions object - const searchConditions = { - [Cond.AND]: [flowConditions ?? {}, cursorCondition ?? {}], - }; - - // check and map orderBy to be from entity 'flow' - const orderByFlow = mapFlowOrderBy(orderBy); - - const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, searchConditions, orderByFlow, limit), - this.flowService.getFlowsCount(models, flowConditions), - ]); - - // Map count result query to count object - const countObject = countRes[0] as { count: number }; - - return { flows, count: countObject.count }; - } - - async searchV2( - models: Database, - _databaseConnection: Knex, - limit: number, - orderBy: FlowOrderBy, - cursorCondition: any | undefined, - flowFilters: SearchFlowsFilters, - _flowObjectFilters: FlowObjectFilters[], - _flowCategoryFilters: FlowCategory[], - _searchPendingFlows: boolean | undefined - ): Promise { + async search(args: FlowSearchArgs): Promise { + const { models, flowFilters, orderBy, limit, cursorCondition } = args; // Map flowConditions to where clause const flowConditions = prepareFlowConditions(flowFilters); diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index bf493a7d..34d69c1c 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -1,16 +1,9 @@ -import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import type Knex from 'knex'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; import { - type FlowCategory, - type FlowObjectFilters, - type SearchFlowsFilters, -} from '../../graphql/args'; -import { type FlowOrderBy } from '../../model'; -import { + type FlowSearchArgs, type FlowSearchStrategy, type FlowSearchStrategyResponse, } from '../flow-search-strategy'; @@ -32,28 +25,18 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { private readonly getFlowIdsFromObjectConditions: GetFlowIdsFromObjectConditionsStrategyImpl ) {} - search( - _flowConditions: any, - _models: 
Database, - _orderBy?: any, - _limit?: number, - _cursorCondition?: any, - _filterByPendingFlows?: boolean - ): Promise { - throw new Error('Method not implemented.'); - } + async search(args: FlowSearchArgs): Promise { + const { + models, + databaseConnection, + flowFilters, + flowObjectFilters, + flowCategoryFilters, + orderBy, + limit, + searchPendingFlows: isSearchPendingFlows, + } = args; - async searchV2( - models: Database, - databaseConnection: Knex, - limit: number, - orderBy: FlowOrderBy, - cursorCondition: any | undefined, - flowFilters: SearchFlowsFilters, - flowObjectFilters: FlowObjectFilters[], - flowCategoryFilters: FlowCategory[], - searchPendingFlows: boolean | undefined - ): Promise { // First, we need to check if we need to sort by a certain entity // and if so, we need to map the orderBy to be from that entity // obtain the entities relation to the flow @@ -76,7 +59,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { // Now we need to check if we need to filter by category // if it's using the shorcut 'pending' // or if there are any flowCategoryFilters - const isSearchByPendingDefined = searchPendingFlows !== undefined; + const isSearchByPendingDefined = isSearchPendingFlows !== undefined; const isFilterByCategory = isSearchByPendingDefined || flowCategoryFilters?.length > 0; @@ -89,7 +72,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { models, new Map(), flowCategoryFilters ?? [], - searchPendingFlows + isSearchPendingFlows ); flowIDsFromCategoryFilters.push(...flowIDsFromCategoryStrategy.flowIDs); } @@ -117,7 +100,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { { isFilterByFlowObjects, isFilterByCategory, - willSearchPendingFlows: searchPendingFlows, + willSearchPendingFlows: isSearchPendingFlows, isSearchByPendingDefined, }, { From cf90db4f9b20f516a2f11eed3e01eff9778bffc4 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 2 Jan 2024 09:01:31 +0100 Subject: [PATCH 54/67] Add shortcuts for category filters --- package.json | 2 +- .../flows/flow-search-service.ts | 125 ++++++++++++++++-- src/domain-services/flows/graphql/args.ts | 76 ++++++----- .../flows/strategy/flow-search-strategy.ts | 2 +- .../flows/strategy/flowID-search-strategy.ts | 14 +- ...-flow-category-conditions-strategy-impl.ts | 14 +- ...Ids-flow-mixed-conditions-strategy-impl.ts | 59 --------- ...ds-flow-object-conditions-strategy-impl.ts | 8 +- .../search-flow-by-filters-strategy-impl.ts | 71 +++++----- .../flows/strategy/impl/utils.ts | 9 +- yarn.lock | 4 +- 11 files changed, 221 insertions(+), 163 deletions(-) delete mode 100644 src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts diff --git a/package.json b/package.json index 54b9f0b7..d2566809 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "lint": "yarn lint-prettier && yarn lint-eslint" }, "dependencies": { - "@unocha/hpc-api-core": "github:UN-OCHA/hpc-api-core#3a3030ee83ad77e5fd7c40238d5ecabe1e6c7da9", + "@unocha/hpc-api-core": "github:UN-OCHA/hpc-api-core#e298382f38848370c6daa0ac86b2016eddbef356", "apollo-server-hapi": "^3.12.0", "bunyan": "^1.8.15", "class-validator": "^0.14.0", diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index b3d1bbfa..f3bfb9fc 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -81,15 +81,43 @@ export class FlowSearchService { flowObjectFilters, 
flowCategoryFilters, pending: isPendingFlows, + commitment: isCommitmentFlows, + paid: isPaidFlows, + pledged: isPledgedFlows, + carryover: isCarryoverFlows, + parked: isParkedFlows, + pass_through: isPassThroughFlows, + standard: isStandardFlows, } = filters; + // Validate the shortcut filters + // There must be only one shortcut filter + // if only one is defined + // return an object like + // { {where:{ + // group: 'inactiveReason', + // name: 'Pending review', + // }, operation: 'IN'} } + // if more than one is defined + // throw an error + const shortcutFilter = this.validateShortcutFilters( + isPendingFlows, + isCommitmentFlows, + isPaidFlows, + isPledgedFlows, + isCarryoverFlows, + isParkedFlows, + isPassThroughFlows, + isStandardFlows + ); + // Once we've gathered all the filters, we need to determine the strategy // to use in order to obtain the flowIDs const strategy: FlowSearchStrategy = this.determineStrategy( flowFilters, flowObjectFilters, flowCategoryFilters, - isPendingFlows, + shortcutFilter, orderBy ); @@ -109,7 +137,8 @@ export class FlowSearchService { flowFilters, flowObjectFilters, flowCategoryFilters, - searchPendingFlows: isPendingFlows, + // shortcuts for categories + shortcutFilter, }); // Remove the extra item used to check hasNextPage @@ -280,11 +309,63 @@ export class FlowSearchService { }; } + /** + * This method validates that only one shortcut filter is defined + * and returns the shortcut filter defined with the operation + * IN if is true or NOT IN if is false + * + * @param isPendingFlows + * @param isCommitmentFlows + * @param isPaidFlows + * @param isPledgedFlows + * @param isCarryoverFlows + * @param isParkedFlows + * @param isPassThroughFlows + * @param isStandardFlows + * @returns { category: String, operation: Op.IN | Op.NOT_IN} + */ + validateShortcutFilters( + isPendingFlows: boolean, + isCommitmentFlows: boolean, + isPaidFlows: boolean, + isPledgedFlows: boolean, + isCarryoverFlows: boolean, + isParkedFlows: boolean, + isPassThroughFlows: boolean, + isStandardFlows: boolean + ) { + const filters = [ + { flag: isPendingFlows, category: 'Pending', group: 'inactiveReason' }, + { flag: isCommitmentFlows, category: 'Commitment', group: 'flowStatus' }, + { flag: isPaidFlows, category: 'Paid', group: 'flowStatus' }, + { flag: isPledgedFlows, category: 'Pledged', group: 'flowStatus' }, + { flag: isCarryoverFlows, category: 'Carryover', group: 'flowType' }, + { flag: isParkedFlows, category: 'Parked', group: 'flowType' }, + { flag: isPassThroughFlows, category: 'Pass Through', group: 'flowType' }, + { flag: isStandardFlows, category: 'Standard', group: 'flowType' }, + ]; + + const shortcutFilters = filters + .filter((filter) => filter.flag) + .map((filter) => ({ + where: { group: filter.group, name: filter.category }, + operation: filter.flag ? Op.IN : Op.NOT_IN, + })); + + if (shortcutFilters.length > 1) { + throw new Error( + 'Only one shortcut filter can be defined at the same time' + ); + } + + return shortcutFilters.length === 1 ? 
shortcutFilters[0] : null; + } + determineStrategy( flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], flowCategoryFilters: FlowCategory[], - isPendingFlows: boolean, + shortcutFilter: any | null, orderBy?: FlowOrderBy ) { // If there are no filters (flowFilters, flowObjectFilters, flowCategoryFilters or pending) @@ -297,18 +378,20 @@ export class FlowSearchService { const isFlowFiltersDefined = flowFilters !== undefined; const isFlowObjectFiltersDefined = flowObjectFilters !== undefined; const isFlowCategoryFiltersDefined = flowCategoryFilters !== undefined; - const isFilterByPendingFlowsDefined = isPendingFlows !== undefined; + // Shortcuts fot categories + const isFilterByShortcutsDefined = shortcutFilter !== null; const isNoFilterDefined = !isFlowFiltersDefined && !isFlowObjectFiltersDefined && !isFlowCategoryFiltersDefined && - !isFilterByPendingFlowsDefined; + !isFilterByShortcutsDefined; + const isFlowFiltersOnly = isFlowFiltersDefined && !isFlowObjectFiltersDefined && !isFlowCategoryFiltersDefined && - !isFilterByPendingFlowsDefined; + !isFilterByShortcutsDefined; if (isOrderByEntityFlow && (isNoFilterDefined || isFlowFiltersOnly)) { // Use onlyFlowFiltersStrategy @@ -683,6 +766,13 @@ export class FlowSearchService { flowObjectFilters, flowCategoryFilters, pending: isPendingFlows, + commitment: isCommitmentFlows, + paid: isPaidFlows, + pledged: isPledgedFlows, + carryover: isCarryoverFlows, + parked: isParkedFlows, + pass_through: isPassThroughFlows, + standard: isStandardFlows, } = args; if (!flowFilters) { @@ -692,13 +782,31 @@ export class FlowSearchService { flowFilters.activeStatus = true; } + // Validate the shortcut filters + // There must be only one shortcut filter + // if only one is defined + // return an object like + // { category: 'Parked', operation: 'IN' } + // if more than one is defined + // throw an error + const shortcutFilter = this.validateShortcutFilters( + isPendingFlows, + isCommitmentFlows, + isPaidFlows, + isPledgedFlows, + isCarryoverFlows, + isParkedFlows, + isPassThroughFlows, + isStandardFlows + ); + // Once we've gathered all the filters, we need to determine the strategy // to use in order to obtain the flowIDs const strategy: FlowSearchStrategy = this.determineStrategy( flowFilters, flowObjectFilters, flowCategoryFilters, - isPendingFlows + shortcutFilter ); const { flows, count } = await strategy.search({ @@ -707,7 +815,8 @@ export class FlowSearchService { flowFilters, flowObjectFilters, flowCategoryFilters, - searchPendingFlows: isPendingFlows, + // shortcuts for categories + shortcutFilter, }); const flowsAmountUSD: Array = flows.map( diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 9ab22ce2..02bb4662 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -2,36 +2,6 @@ import { ArgsType, Field, InputType } from 'type-graphql'; import { PaginationArgs } from '../../../utils/graphql/pagination'; import { type FlowSortField } from './types'; -@InputType() -export class FlowStatusFilters { - @Field(() => Boolean, { nullable: true }) - pending: boolean | null; - - @Field(() => Boolean, { nullable: true }) - commitment: boolean | null; - - @Field(() => Boolean, { nullable: true }) - paid: boolean | null; - - @Field(() => Boolean, { nullable: true }) - pledged: boolean | null; -} - -@InputType() -export class FlowTypeFilters { - @Field(() => Boolean, { nullable: true }) - carryover: boolean | null; - - @Field(() => Boolean, 
{ nullable: true }) - parked: boolean | null; - - @Field(() => Boolean, { nullable: true }) - pass_through: boolean | null; - - @Field(() => Boolean, { nullable: true }) - standard: boolean | null; -} - @InputType() export class SearchFlowsFilters { @Field(() => [Number], { nullable: true }) @@ -116,8 +86,29 @@ export class SearchFlowsArgs extends PaginationArgs { @Field(() => [FlowCategory], { nullable: true }) flowCategoryFilters: FlowCategory[]; - @Field({ nullable: true }) + @Field(() => Boolean, { nullable: true }) pending: boolean; + + @Field(() => Boolean, { nullable: true }) + commitment: boolean; + + @Field(() => Boolean, { nullable: true }) + paid: boolean; + + @Field(() => Boolean, { nullable: true }) + pledged: boolean; + + @Field(() => Boolean, { nullable: true }) + carryover: boolean; + + @Field(() => Boolean, { nullable: true }) + parked: boolean; + + @Field(() => Boolean, { nullable: true }) + pass_through: boolean; + + @Field(() => Boolean, { nullable: true }) + standard: boolean; } @ArgsType() @@ -134,6 +125,27 @@ export class SearchFlowsArgsNonPaginated { @Field(() => [FlowCategory], { nullable: true }) flowCategoryFilters: FlowCategory[]; - @Field({ nullable: true }) + @Field(() => Boolean, { nullable: true }) pending: boolean; + + @Field(() => Boolean, { nullable: true }) + commitment: boolean; + + @Field(() => Boolean, { nullable: true }) + paid: boolean; + + @Field(() => Boolean, { nullable: true }) + pledged: boolean; + + @Field(() => Boolean, { nullable: true }) + carryover: boolean; + + @Field(() => Boolean, { nullable: true }) + parked: boolean; + + @Field(() => Boolean, { nullable: true }) + pass_through: boolean; + + @Field(() => Boolean, { nullable: true }) + standard: boolean; } diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index 4264cf70..25e4cff4 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -21,7 +21,7 @@ export interface FlowSearchArgs { limit?: number; orderBy?: any; cursorCondition?: any; - searchPendingFlows?: boolean; + shortcutFilter: any; } export interface FlowSearchStrategy { diff --git a/src/domain-services/flows/strategy/flowID-search-strategy.ts b/src/domain-services/flows/strategy/flowID-search-strategy.ts index e63d09a3..1671d3d9 100644 --- a/src/domain-services/flows/strategy/flowID-search-strategy.ts +++ b/src/domain-services/flows/strategy/flowID-search-strategy.ts @@ -6,13 +6,15 @@ export interface FlowIdSearchStrategyResponse { flowIDs: FlowId[]; } +export interface FlowIdSearchStrategyArgs { + models: Database; + flowObjectsConditions?: Map>; + flowCategoryConditions?: FlowCategory[]; + shortcutFilter?: any | null; +} + export interface FlowIDSearchStrategy { - search( - models: Database, - flowObjectsConditions: Map>, - flowCategoryConditions: FlowCategory[], - filterByPendingFlows?: boolean - ): Promise; + search(args: FlowIdSearchStrategyArgs): Promise; generateWhereClause( flowIds: FlowId[], diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts index f811deed..457cbe74 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -1,13 +1,12 @@ -import { type 
Database } from '@unocha/hpc-api-core/src/db'; import { type CategoryId } from '@unocha/hpc-api-core/src/db/models/category'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { CategoryService } from '../../../categories/category-service'; -import { type FlowCategory } from '../../graphql/args'; import { type FlowIDSearchStrategy, + type FlowIdSearchStrategyArgs, type FlowIdSearchStrategyResponse, } from '../flowID-search-strategy'; import { mapFlowCategoryConditionsToWhereClause } from './utils'; @@ -19,14 +18,13 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl constructor(private readonly categoryService: CategoryService) {} async search( - models: Database, - _flowObjectsConditions: Map>, - flowCategoryConditions: FlowCategory[], - filterByPendingFlows: boolean | undefined + args: FlowIdSearchStrategyArgs ): Promise { + const { models, flowCategoryConditions, shortcutFilter } = args; + const whereClause = mapFlowCategoryConditionsToWhereClause( - filterByPendingFlows, - flowCategoryConditions + shortcutFilter, + flowCategoryConditions! ); const categories = await this.categoryService.findCategories( diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts deleted file mode 100644 index ff046cc6..00000000 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-mixed-conditions-strategy-impl.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { type Database } from '@unocha/hpc-api-core/src/db'; -import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; -import { Service } from 'typedi'; -import { type FlowCategory } from '../../graphql/args'; -import { - type FlowIDSearchStrategy, - type FlowIdSearchStrategyResponse, -} from '../flowID-search-strategy'; -import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; -import { GetFlowIdsFromObjectConditionsStrategyImpl } from './get-flowIds-flow-object-conditions-strategy-impl'; -import { mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories } from './utils'; - -@Service() -export class GetFlowIdsFromMixedConditionsStrategyImpl - implements FlowIDSearchStrategy -{ - constructor( - private readonly getFlowIdsFromObjectConditionsStrategy: GetFlowIdsFromObjectConditionsStrategyImpl, - private readonly getFlowIdsFromCategoryConditionsStrategy: GetFlowIdsFromCategoryConditionsStrategyImpl - ) {} - - async search( - models: Database, - flowObjectsConditions: Map>, - flowCategoryConditions: FlowCategory[], - filterByPendingFlows: boolean - ): Promise { - const { flowIDs: flowIdsFromFlowObjects }: FlowIdSearchStrategyResponse = - await this.getFlowIdsFromObjectConditionsStrategy.search( - models, - flowObjectsConditions - ); - - const { flowIDs: flowIdsFromFlowCategories }: FlowIdSearchStrategyResponse = - await this.getFlowIdsFromCategoryConditionsStrategy.search( - models, - flowObjectsConditions, - flowCategoryConditions, - filterByPendingFlows - ); - - const mergeFlowIDs: FlowId[] = - mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( - flowIdsFromFlowObjects, - flowIdsFromFlowCategories - ); - - return { flowIDs: mergeFlowIDs }; - } - - 
generateWhereClause(flowIds: FlowId[]) { - return { - id: { - [Op.IN]: flowIds, - }, - }; - } -} diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts index 974436f5..f8bd56ba 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts @@ -1,10 +1,10 @@ -import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowObjectService } from '../../../flow-object/flow-object-service'; import { type FlowIDSearchStrategy, + type FlowIdSearchStrategyArgs, type FlowIdSearchStrategyResponse, } from '../flowID-search-strategy'; import { mapFlowObjectConditionsToWhereClause } from './utils'; @@ -16,11 +16,11 @@ export class GetFlowIdsFromObjectConditionsStrategyImpl constructor(private readonly flowObjectService: FlowObjectService) {} async search( - models: Database, - flowObjectsConditions: Map> + args: FlowIdSearchStrategyArgs ): Promise { + const { models, flowObjectsConditions } = args; const flowObjectWhere = mapFlowObjectConditionsToWhereClause( - flowObjectsConditions + flowObjectsConditions! ); const flowIDsFromFilteredFlowObjects: FlowId[] = []; diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index 34d69c1c..6aac7d19 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -34,7 +34,8 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { flowCategoryFilters, orderBy, limit, - searchPendingFlows: isSearchPendingFlows, + cursorCondition, + shortcutFilter, } = args; // First, we need to check if we need to sort by a certain entity @@ -57,24 +58,29 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { } // Now we need to check if we need to filter by category - // if it's using the shorcut 'pending' + // if it's using any of the shorcuts // or if there are any flowCategoryFilters - const isSearchByPendingDefined = isSearchPendingFlows !== undefined; + const isSearchByCategoryShotcut = shortcutFilter !== null; const isFilterByCategory = - isSearchByPendingDefined || flowCategoryFilters?.length > 0; + isSearchByCategoryShotcut || flowCategoryFilters?.length > 0; const flowIDsFromCategoryFilters: FlowId[] = []; if (isFilterByCategory) { - const flowIDsFromCategoryStrategy: FlowIdSearchStrategyResponse = - await this.getFlowIdsFromCategoryConditions.search( + const { flowIDs }: FlowIdSearchStrategyResponse = + await this.getFlowIdsFromCategoryConditions.search({ models, - new Map(), - flowCategoryFilters ?? [], - isSearchPendingFlows - ); - flowIDsFromCategoryFilters.push(...flowIDsFromCategoryStrategy.flowIDs); + flowCategoryConditions: flowCategoryFilters ?? 
[], + shortcutFilter, + flowObjectsConditions: undefined, + }); + // Since there can be many flowIDs returned + // This can cause 'Maximum call stack size exceeded' error + // When using the spread operator - a workaround is to use push fot each element + for (const flowID of flowIDs) { + flowIDsFromCategoryFilters.push(flowID); + } } // After that, if we need to filter by flowObjects @@ -86,10 +92,10 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { const flowObjectConditionsMap = mapFlowObjectConditions(flowObjectFilters); const flowIDsFromObjectStrategy: FlowIdSearchStrategyResponse = - await this.getFlowIdsFromObjectConditions.search( + await this.getFlowIdsFromObjectConditions.search({ models, - flowObjectConditionsMap - ); + flowObjectsConditions: flowObjectConditionsMap, + }); flowIDsFromObjectFilters.push(...flowIDsFromObjectStrategy.flowIDs); } @@ -100,26 +106,25 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { { isFilterByFlowObjects, isFilterByCategory, - willSearchPendingFlows: isSearchPendingFlows, - isSearchByPendingDefined, + shortcutFilter, }, { flowIDsFromCategoryFilters, flowIDsFromObjectFilters, flowFilters, + cursorCondition, } ); - let rawOrderBy: string = ''; + let rawOrderBy: string | undefined; let orderByFlow: | { column: any; order: any; } - | undefined = { column: 'updatedAt', order: 'DESC' }; + | undefined; if (isSortByEntity) { rawOrderBy = `array_position(ARRAY[${sortByFlowIDs.join(',')}], "id")`; - orderByFlow = undefined; } else { orderByFlow = mapFlowOrderBy(orderBy); } @@ -140,39 +145,34 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { return { flows, count: countObject.count }; } + buildConditions( decisionArgs: { isFilterByFlowObjects: boolean; isFilterByCategory: boolean; - willSearchPendingFlows: boolean | undefined; - isSearchByPendingDefined: boolean; + shortcutFilter: any | null; }, filterArgs: { flowIDsFromCategoryFilters: FlowId[]; flowIDsFromObjectFilters: FlowId[]; flowFilters: any | undefined; + cursorCondition: any | undefined; } ): { countConditions: any; searchConditions: any } { - const { - isFilterByFlowObjects, - isFilterByCategory, - willSearchPendingFlows, - isSearchByPendingDefined, - } = decisionArgs; + const { isFilterByFlowObjects, isFilterByCategory, shortcutFilter } = + decisionArgs; const { flowIDsFromCategoryFilters, flowIDsFromObjectFilters, flowFilters, + cursorCondition, } = filterArgs; let countConditions: any = {}; let searchConditions: any = {}; // Check if we have flowIDs from flowObjects and flowCategoryFilters // if so, we need to filter by those flowIDs - if ( - (isFilterByFlowObjects || isFilterByCategory) && - isSearchByPendingDefined - ) { + if ((isFilterByFlowObjects || isFilterByCategory) && shortcutFilter) { const deduplicatedFlowIDs = [...new Set(flowIDsFromCategoryFilters)]; searchConditions = { @@ -216,12 +216,8 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { }, ], }; - } else if (isFilterByCategory || isSearchByPendingDefined) { - const idCondition = isSearchByPendingDefined - ? willSearchPendingFlows - ? Op.IN - : Op.NOT_IN - : Op.IN; + } else if (isFilterByCategory || shortcutFilter) { + const idCondition = shortcutFilter ? 
shortcutFilter.operation : Op.IN; searchConditions = { ...searchConditions, @@ -262,6 +258,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { // Combine cursor condition with flow conditions searchConditions = { ...searchConditions, + ...cursorCondition, [Cond.AND]: [flowConditions ?? {}], }; } diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index 5848ae8a..fe4e4a5b 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -42,18 +42,17 @@ export function mapFlowObjectConditionsToWhereClause( } export function mapFlowCategoryConditionsToWhereClause( - filterByPendingFlows: boolean | undefined, + shortcutFilter: any | null, flowCategoryConditions: FlowCategory[] ) { let whereClause = {}; - if (filterByPendingFlows !== undefined) { + const shortcutsWhereClause = shortcutFilter ? shortcutFilter.where : null; + if (shortcutsWhereClause) { whereClause = { - group: 'inactiveReason', - name: 'Pending review', + [Cond.OR]: [shortcutsWhereClause], }; } - if (flowCategoryConditions.length > 0) { // Map category filters // getting Id when possible diff --git a/yarn.lock b/yarn.lock index 24ba2e20..2d57d7da 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1624,9 +1624,9 @@ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@unocha/hpc-api-core@github:UN-OCHA/hpc-api-core#3a3030ee83ad77e5fd7c40238d5ecabe1e6c7da9": +"@unocha/hpc-api-core@github:UN-OCHA/hpc-api-core#e298382f38848370c6daa0ac86b2016eddbef356": version "7.0.0" - resolved "https://codeload.github.com/UN-OCHA/hpc-api-core/tar.gz/3a3030ee83ad77e5fd7c40238d5ecabe1e6c7da9" + resolved "https://codeload.github.com/UN-OCHA/hpc-api-core/tar.gz/e298382f38848370c6daa0ac86b2016eddbef356" dependencies: "@types/lodash" "^4.14.194" "@types/node-fetch" "2.6.3" From a4be794caef4ca5997bf8bdcd32a7cfbd4d5b2a2 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 9 Jan 2024 20:58:42 +0100 Subject: [PATCH 55/67] Temp: ref --- .../flow-object/flow-object-service.ts | 22 ++ .../flows/flow-search-service.ts | 108 ++++---- src/domain-services/flows/flow-service.ts | 31 ++- src/domain-services/flows/graphql/resolver.ts | 1 + src/domain-services/flows/model.ts | 16 ++ .../flows/strategy/flow-search-strategy.ts | 4 +- .../flows/strategy/flowID-search-strategy.ts | 7 +- ...-flow-category-conditions-strategy-impl.ts | 162 +++++++++-- ...ds-flow-object-conditions-strategy-impl.ts | 11 +- .../only-flow-conditions-strategy-impl.ts | 12 +- .../search-flow-by-filters-strategy-impl.ts | 255 +++++++----------- .../flows/strategy/impl/utils.ts | 107 +++++++- src/utils/graphql/pagination.ts | 18 +- 13 files changed, 466 insertions(+), 288 deletions(-) diff --git a/src/domain-services/flow-object/flow-object-service.ts b/src/domain-services/flow-object/flow-object-service.ts index aeb25120..0baf427b 100644 --- a/src/domain-services/flow-object/flow-object-service.ts +++ b/src/domain-services/flow-object/flow-object-service.ts @@ -1,7 +1,9 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; +import 
{ UniqueFlowEntity } from '../flows/model'; @Service() export class FlowObjectService { @@ -16,6 +18,26 @@ export class FlowObjectService { return [...new Set(flowObjects.map((flowObject) => flowObject.flowID))]; } + async getFlowFromFlowObjects( + models: Database, + where: any + ): Promise { + const flowObjects = await models.flowObject.find({ + where, + }); + // Keep only not duplicated flowIDs + return [ + ...new Set( + flowObjects.map((flowObject) => { + return { + id: createBrandedValue(flowObject.flowID), + versionID: flowObject.versionID, + }; + }) + ), + ]; + } + async getFlowObjectByFlowId(models: Database, flowIds: FlowId[]) { return await models.flowObject.find({ where: { diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index f3bfb9fc..1c34d314 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -90,17 +90,10 @@ export class FlowSearchService { standard: isStandardFlows, } = filters; - // Validate the shortcut filters - // There must be only one shortcut filter - // if only one is defined // return an object like - // { {where:{ - // group: 'inactiveReason', - // name: 'Pending review', - // }, operation: 'IN'} } - // if more than one is defined - // throw an error - const shortcutFilter = this.validateShortcutFilters( + // { name: 'Pending review', + // operation: 'IN'} [] + const shortcutFilter = this.mapShortcutFilters( isPendingFlows, isCommitmentFlows, isPaidFlows, @@ -122,18 +115,20 @@ export class FlowSearchService { ); // Build cursor condition - const cursorCondition = this.buildCursorCondition( - prevPageCursor, - nextPageCursor, - orderBy - ); + // const cursorCondition = this.buildCursorCondition( + // prevPageCursor, + // nextPageCursor, + // orderBy + // ); + + const offset = nextPageCursor ?? prevPageCursor ?? 0; const { flows, count } = await strategy.search({ models, databaseConnection, limit, orderBy, - cursorCondition, + offset, flowFilters, flowObjectFilters, flowCategoryFilters, @@ -275,33 +270,33 @@ export class FlowSearchService { }) ); - const isOrderByForFlows = orderBy.entity === 'flow'; - const firstItem = items[0]; - const prevPageCursorEntity = isOrderByForFlows - ? firstItem - : firstItem[orderBy.entity as keyof typeof firstItem]; - const prevPageCursorValue = prevPageCursorEntity - ? prevPageCursorEntity[ - orderBy.column as keyof typeof prevPageCursorEntity - ] ?? '' - : ''; - - const lastItem = items.at(-1); - const nextPageCursorEntity = isOrderByForFlows - ? lastItem - : lastItem![orderBy.entity as keyof typeof lastItem]; - const nextPageCursorValue = nextPageCursorEntity - ? nextPageCursorEntity[ - orderBy.column as keyof typeof nextPageCursorEntity - ]?.toString() ?? '' - : ''; + // const isOrderByForFlows = orderBy.entity === 'flow'; + // const firstItem = items[0]; + // const prevPageCursorEntity = isOrderByForFlows + // ? firstItem + // : firstItem[orderBy.entity as keyof typeof firstItem]; + // const prevPageCursorValue = prevPageCursorEntity + // ? prevPageCursorEntity[ + // orderBy.column as keyof typeof prevPageCursorEntity + // ] ?? '' + // : ''; + + // const lastItem = items.at(-1); + // const nextPageCursorEntity = isOrderByForFlows + // ? lastItem + // : lastItem![orderBy.entity as keyof typeof lastItem]; + // const nextPageCursorValue = nextPageCursorEntity + // ? nextPageCursorEntity[ + // orderBy.column as keyof typeof nextPageCursorEntity + // ]?.toString() ?? 
'' + // : ''; return { flows: items, hasNextPage: limit <= flows.length, hasPreviousPage: nextPageCursor !== undefined, - prevPageCursor: prevPageCursorValue, - nextPageCursor: nextPageCursorValue, + prevPageCursor: nextPageCursor ?? 0, + nextPageCursor: nextPageCursor ? nextPageCursor + limit : limit, pageSize: flows.length, sortField: `${orderBy.entity}.${orderBy.column}` as FlowSortField, sortOrder: sortOrder ?? 'desc', @@ -310,8 +305,7 @@ export class FlowSearchService { } /** - * This method validates that only one shortcut filter is defined - * and returns the shortcut filter defined with the operation + * This method returns the shortcut filter defined with the operation * IN if is true or NOT IN if is false * * @param isPendingFlows @@ -322,9 +316,9 @@ export class FlowSearchService { * @param isParkedFlows * @param isPassThroughFlows * @param isStandardFlows - * @returns { category: String, operation: Op.IN | Op.NOT_IN} + * @returns [{ category: String, operation: Op.IN | Op.NOT_IN}] */ - validateShortcutFilters( + mapShortcutFilters( isPendingFlows: boolean, isCommitmentFlows: boolean, isPaidFlows: boolean, @@ -335,30 +329,24 @@ export class FlowSearchService { isStandardFlows: boolean ) { const filters = [ - { flag: isPendingFlows, category: 'Pending', group: 'inactiveReason' }, - { flag: isCommitmentFlows, category: 'Commitment', group: 'flowStatus' }, - { flag: isPaidFlows, category: 'Paid', group: 'flowStatus' }, - { flag: isPledgedFlows, category: 'Pledged', group: 'flowStatus' }, - { flag: isCarryoverFlows, category: 'Carryover', group: 'flowType' }, - { flag: isParkedFlows, category: 'Parked', group: 'flowType' }, - { flag: isPassThroughFlows, category: 'Pass Through', group: 'flowType' }, - { flag: isStandardFlows, category: 'Standard', group: 'flowType' }, + { flag: isPendingFlows, category: 'Pending' }, + { flag: isCommitmentFlows, category: 'Commitment' }, + { flag: isPaidFlows, category: 'Paid' }, + { flag: isPledgedFlows, category: 'Pledged' }, + { flag: isCarryoverFlows, category: 'Carryover' }, + { flag: isParkedFlows, category: 'Parked' }, + { flag: isPassThroughFlows, category: 'Pass Through' }, + { flag: isStandardFlows, category: 'Standard' }, ]; const shortcutFilters = filters - .filter((filter) => filter.flag) + .filter((filter) => filter.flag !== undefined) .map((filter) => ({ - where: { group: filter.group, name: filter.category }, + category: filter.category, operation: filter.flag ? Op.IN : Op.NOT_IN, })); - if (shortcutFilters.length > 1) { - throw new Error( - 'Only one shortcut filter can be defined at the same time' - ); - } - - return shortcutFilters.length === 1 ? 
shortcutFilters[0] : null; + return shortcutFilters; } determineStrategy( @@ -789,7 +777,7 @@ export class FlowSearchService { // { category: 'Parked', operation: 'IN' } // if more than one is defined // throw an error - const shortcutFilter = this.validateShortcutFilters( + const shortcutFilter = this.mapShortcutFilters( isPendingFlows, isCommitmentFlows, isPaidFlows, diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index e08c44c7..ca3f6982 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -1,27 +1,25 @@ -import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type Database } from '@unocha/hpc-api-core/src/db/type'; import type Knex from 'knex'; import { Service } from 'typedi'; import { type FlowObjectType } from '../flow-object/model'; -import { type FlowOrderBy } from './model'; -import { mapFlowOrderBy } from './strategy/impl/utils'; +import { GetFlowsArgs, UniqueFlowEntity, type FlowOrderBy } from './model'; +import { + mapFlowOrderBy, + removeDuplicatesUniqueFlowEntities, +} from './strategy/impl/utils'; @Service() export class FlowService { constructor() {} - async getFlows( - models: Database, - conditions: any, - orderBy?: any, - limit?: number, - rawOrderBy?: string - ) { + async getFlows(args: GetFlowsArgs) { + const { models, conditions, offset, orderBy, limit } = args; + return await models.flow.find({ orderBy, limit, where: conditions, - orderByRaw: rawOrderBy, + offset, }); } @@ -34,7 +32,7 @@ export class FlowService { dbConnection: Knex, orderBy: FlowOrderBy, limit?: number - ): Promise { + ): Promise { const entity = orderBy.subEntity ?? orderBy.entity; // Get the entity list @@ -52,7 +50,7 @@ export class FlowService { const query = dbConnection .queryBuilder() - .select('flowID') + .select('flowID', 'versionID') .from('flowObject') .whereIn('objectID', entityIDs) .andWhere('objectType', entityCondKey) @@ -65,6 +63,11 @@ export class FlowService { } const flowIDs = await query; - return flowIDs.map((flowID) => flowID.flowID); + const mapFlowsToUniqueFlowEntities = flowIDs.map((flowID) => ({ + id: flowID.flowID, + versionID: flowID.versionID, + })); + + return removeDuplicatesUniqueFlowEntities(mapFlowsToUniqueFlowEntities); } } diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 78c4d9d5..4f3e1df2 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -4,6 +4,7 @@ import Context from '../../Context'; import { FlowSearchService } from '../flow-search-service'; import { SearchFlowsArgs, SearchFlowsArgsNonPaginated } from './args'; import { + BaseFlow, Flow, FlowSearchResult, FlowSearchResultNonPaginated, diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index 55fb4ead..03400739 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -1,8 +1,16 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import Knex from 'knex'; import { type SortOrder } from '../../utils/graphql/pagination'; + export type FlowEntity = InstanceDataOfModel; +export type UniqueFlowEntity = { + id: FlowId; + versionID: number; +}; + export type FlowOrderBy = { column: string; order: SortOrder; @@ -12,3 +20,11 @@ 
export type FlowOrderBy = { }; export type FlowNestedDirection = 'source' | 'destination'; + +export type GetFlowsArgs = { + models: Database; + conditions: any; + offset?: number; + orderBy?: any; + limit?: number; +}; diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index 25e4cff4..127eef67 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -18,10 +18,10 @@ export interface FlowSearchArgs { flowFilters: SearchFlowsFilters; flowObjectFilters: FlowObjectFilters[]; flowCategoryFilters: FlowCategory[]; + shortcutFilter: any; limit?: number; + offset?: number; orderBy?: any; - cursorCondition?: any; - shortcutFilter: any; } export interface FlowSearchStrategy { diff --git a/src/domain-services/flows/strategy/flowID-search-strategy.ts b/src/domain-services/flows/strategy/flowID-search-strategy.ts index 1671d3d9..b219299c 100644 --- a/src/domain-services/flows/strategy/flowID-search-strategy.ts +++ b/src/domain-services/flows/strategy/flowID-search-strategy.ts @@ -1,16 +1,19 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import Knex from 'knex'; import { type FlowCategory } from '../graphql/args'; +import { type UniqueFlowEntity } from '../model'; export interface FlowIdSearchStrategyResponse { - flowIDs: FlowId[]; + flows: UniqueFlowEntity[]; } export interface FlowIdSearchStrategyArgs { + databaseConnection: Knex; models: Database; flowObjectsConditions?: Map>; flowCategoryConditions?: FlowCategory[]; - shortcutFilter?: any | null; + shortcutFilter?: any[] | null; } export interface FlowIDSearchStrategy { diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts index 457cbe74..36f437bf 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -1,15 +1,19 @@ import { type CategoryId } from '@unocha/hpc-api-core/src/db/models/category'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { CategoryService } from '../../../categories/category-service'; +import { UniqueFlowEntity } from '../../model'; import { type FlowIDSearchStrategy, type FlowIdSearchStrategyArgs, type FlowIdSearchStrategyResponse, } from '../flowID-search-strategy'; -import { mapFlowCategoryConditionsToWhereClause } from './utils'; +import { + mapFlowCategoryConditionsToWhereClause, + removeDuplicatesUniqueFlowEntities, +} from './utils'; @Service() export class GetFlowIdsFromCategoryConditionsStrategyImpl @@ -17,40 +21,158 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl { constructor(private readonly categoryService: CategoryService) {} + private readonly categoryIDsMap: Map = new Map([ + ['Pending', 45], + ['Pledge', 46], + ['Commitment', 47], + ['Paid', 48], + ['Standard', 133], + ['Pass through', 136], + ['Carryover', 137], + ['Parked', 1252], + ]); + async search( args: 
FlowIdSearchStrategyArgs ): Promise { - const { models, flowCategoryConditions, shortcutFilter } = args; + const { + models, + flowCategoryConditions, + shortcutFilter, + databaseConnection, + } = args; + + const categoriesIds: CategoryId[] = []; const whereClause = mapFlowCategoryConditionsToWhereClause( - shortcutFilter, flowCategoryConditions! ); - const categories = await this.categoryService.findCategories( - models, - whereClause - ); + if (whereClause) { + const categories = await this.categoryService.findCategories( + models, + whereClause + ); + + categories.map((category) => category.id); + } - const categoriesIds: CategoryId[] = categories.map( - (category) => category.id + // Add category IDs from shortcut filter + // to the list of category IDs IN or NOT_IN + const categoriesIdsFromShortcutFilterIN: CategoryId[] = []; + const categoriesIdsFromShortcutFilterNOTIN: CategoryId[] = []; + + if (shortcutFilter) { + for (const shortcut of shortcutFilter) { + const shortcutCategoryID = this.categoryIDsMap.get(shortcut.category); + if (shortcutCategoryID) { + if (shortcut.operation === Op.IN) { + categoriesIdsFromShortcutFilterIN.push( + createBrandedValue(shortcutCategoryID) + ); + } else { + categoriesIdsFromShortcutFilterNOTIN.push( + createBrandedValue(shortcutCategoryID) + ); + } + } + } + } + + let query = databaseConnection + .queryBuilder() + .distinct('objectID', 'versionID') + .select('objectID', 'versionID') + .from('categoryRef'); + + if (categoriesIds.length > 0) { + query = query + .orWhere('categoryID', 'IN', categoriesIds) + .andWhere('objectType', 'flow'); + } + + if (categoriesIdsFromShortcutFilterIN.length > 0) { + query = query + .orWhere('categoryID', 'IN', categoriesIdsFromShortcutFilterIN) + .andWhere('objectType', 'flow'); + } + + if (categoriesIdsFromShortcutFilterNOTIN.length > 0) { + query = query + .orWhere('categoryID', 'NOT IN', categoriesIdsFromShortcutFilterNOTIN) + .andWhere('objectType', 'flow'); + } + + const flows = await query; + console.log('flows', flows.length); + const mapFlows: UniqueFlowEntity[] = flows.map( + (flow) => + ({ + id: flow.objectID, + versionID: flow.versionID, + }) as UniqueFlowEntity ); - const categoryRefs = await this.categoryService.findCategoryRefs(models, { - categoryID: { - [Op.IN]: categoriesIds, - }, - objectType: 'flow', - }); + return { flows: mapFlows }; + // const whereClauseCategoryRef = { + // [Cond.OR]: [ + // categoriesIds.length > 0 + // ? { + // [Cond.AND]: [ + // { + // categoryID: { + // [Op.IN]: categoriesIds, + // }, + // }, + // { objectType: 'flow' }, + // ], + // } + // : {}, + // categoriesIdsFromShortcutFilterIN.length > 0 + // ? { + // [Cond.AND]: [ + // { + // categoryID: { + // [Op.IN]: categoriesIdsFromShortcutFilterIN, + // }, + // }, + // { objectType: 'flow' }, + // ], + // } + // : {}, + // categoriesIdsFromShortcutFilterNOTIN.length > 0 + // ? 
{ + // [Cond.AND]: [ + // { + // categoryID: { + // [Op.NOT_IN]: categoriesIdsFromShortcutFilterNOTIN, + // }, + // }, + // { objectType: 'flow' }, + // ], + // } + // : {}, + // ], + // }; + + // const categoryRefs = await this.categoryService.findCategoryRefs( + // models, + // whereClauseCategoryRef + // ); // Map category refs to flow IDs // keep only unique values // and return the list of flow IDs - const flowIds = [ - ...new Set(categoryRefs.map((categoryRef) => categoryRef.objectID)), - ].map((flowId) => createBrandedValue(flowId)); + // const mapFlowsToUniqueFlowEntities = categoryRefs.map((categoryRef) => { + // return { + // id: categoryRef.objectID, + // versionID: categoryRef.versionID, + // } as UniqueFlowEntity; + // }) + + // const flows = removeDuplicatesUniqueFlowEntities(mapFlowsToUniqueFlowEntities); - return { flowIDs: flowIds }; + // return { flows }; } generateWhereClause( diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts index f8bd56ba..e6d258f7 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts @@ -2,6 +2,7 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowObjectService } from '../../../flow-object/flow-object-service'; +import { UniqueFlowEntity } from '../../model'; import { type FlowIDSearchStrategy, type FlowIdSearchStrategyArgs, @@ -23,18 +24,18 @@ export class GetFlowIdsFromObjectConditionsStrategyImpl flowObjectsConditions! ); - const flowIDsFromFilteredFlowObjects: FlowId[] = []; - const tempFlowIDs: FlowId[][] = await Promise.all( + const flowsFromFilteredFlowObjects: UniqueFlowEntity[] = []; + const tempFlowIDs: UniqueFlowEntity[][] = await Promise.all( flowObjectWhere.map((whereClause) => - this.flowObjectService.getFlowIdsFromFlowObjects(models, whereClause) + this.flowObjectService.getFlowFromFlowObjects(models, whereClause) ) ); // Flatten array of arrays keeping only values present in all arrays const flowIDs = tempFlowIDs.flat(); - flowIDsFromFilteredFlowObjects.push(...new Set(flowIDs)); + flowsFromFilteredFlowObjects.push(...new Set(flowIDs)); - return { flowIDs: flowIDsFromFilteredFlowObjects }; + return { flows: flowsFromFilteredFlowObjects }; } generateWhereClause(flowIds: FlowId[]) { diff --git a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts index d77cbf8d..666ee00c 100644 --- a/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/only-flow-conditions-strategy-impl.ts @@ -17,19 +17,25 @@ export class OnlyFlowFiltersStrategy implements FlowSearchStrategy { constructor(private readonly flowService: FlowService) {} async search(args: FlowSearchArgs): Promise { - const { models, flowFilters, orderBy, limit, cursorCondition } = args; + const { models, flowFilters, orderBy, limit, offset } = args; // Map flowConditions to where clause const flowConditions = prepareFlowConditions(flowFilters); // Build conditions object const searchConditions = { - [Cond.AND]: [flowConditions ?? {}, cursorCondition ?? {}], + [Cond.AND]: [flowConditions ?? 
{}], }; const orderByFlow = mapFlowOrderBy(orderBy); const [flows, countRes] = await Promise.all([ - this.flowService.getFlows(models, searchConditions, orderByFlow, limit), + this.flowService.getFlows({ + models, + conditions: searchConditions, + offset, + orderBy: orderByFlow, + limit, + }), this.flowService.getFlowsCount(models, flowConditions), ]); diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index 6aac7d19..0ba3cb69 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -2,6 +2,7 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; +import { UniqueFlowEntity } from '../../model'; import { type FlowSearchArgs, type FlowSearchStrategy, @@ -11,10 +12,13 @@ import { type FlowIdSearchStrategyResponse } from '../flowID-search-strategy'; import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; import { GetFlowIdsFromObjectConditionsStrategyImpl } from './get-flowIds-flow-object-conditions-strategy-impl'; import { + intersectUniqueFlowEntities, mapCountResultToCountObject, mapFlowObjectConditions, mapFlowOrderBy, + mergeUniqueEntities, prepareFlowConditions, + sortEntitiesByReferenceList, } from './utils'; @Service() @@ -34,7 +38,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { flowCategoryFilters, orderBy, limit, - cursorCondition, + offset, shortcutFilter, } = args; @@ -44,10 +48,10 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { // to be able to sort the flows using the entity const isSortByEntity = orderBy && orderBy.entity !== 'flow'; - const sortByFlowIDs: FlowId[] = []; + const sortByFlowIDs: UniqueFlowEntity[] = []; if (isSortByEntity) { // Get the flowIDs using the orderBy entity - const flowIDsFromSortingEntity: FlowId[] = + const flowIDsFromSortingEntity: UniqueFlowEntity[] = await this.flowService.getFlowIDsFromEntity( models, databaseConnection, @@ -55,6 +59,27 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { limit ); sortByFlowIDs.push(...flowIDsFromSortingEntity); + } else { + // In this case we fetch the list of flows from the database + // using the orderBy + // We can also filter by flowFilters + const flowConditions = prepareFlowConditions(flowFilters); + + const flowsToSort = await this.flowService.getFlows({ + models, + conditions: flowConditions, + orderBy: { column: orderBy.column, order: orderBy.order }, + }); + + const flowIDsFromSortingEntity: UniqueFlowEntity[] = flowsToSort.map( + (flow) => ({ id: flow.id, versionID: flow.versionID }) + ); + // Since there can be many flowIDs returned + // This can cause 'Maximum call stack size exceeded' error + // When using the spread operator - a workaround is to use push fot each element + for (const flow of flowIDsFromSortingEntity) { + sortByFlowIDs.push(flow); + } } // Now we need to check if we need to filter by category @@ -65,11 +90,12 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { const isFilterByCategory = isSearchByCategoryShotcut || flowCategoryFilters?.length > 0; - const flowIDsFromCategoryFilters: FlowId[] = []; + 
const flowsFromCategoryFilters: UniqueFlowEntity[] = []; if (isFilterByCategory) { - const { flowIDs }: FlowIdSearchStrategyResponse = + const { flows }: FlowIdSearchStrategyResponse = await this.getFlowIdsFromCategoryConditions.search({ + databaseConnection, models, flowCategoryConditions: flowCategoryFilters ?? [], shortcutFilter, @@ -78,8 +104,8 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { // Since there can be many flowIDs returned // This can cause 'Maximum call stack size exceeded' error // When using the spread operator - a workaround is to use push fot each element - for (const flowID of flowIDs) { - flowIDsFromCategoryFilters.push(flowID); + for (const flow of flows) { + flowsFromCategoryFilters.push(flow); } } @@ -87,181 +113,84 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { // Obtain the flowIDs from the flowObjects const isFilterByFlowObjects = flowObjectFilters?.length > 0; - const flowIDsFromObjectFilters: FlowId[] = []; + const flowsFromObjectFilters: UniqueFlowEntity[] = []; if (isFilterByFlowObjects) { const flowObjectConditionsMap = mapFlowObjectConditions(flowObjectFilters); - const flowIDsFromObjectStrategy: FlowIdSearchStrategyResponse = + const { flows }: FlowIdSearchStrategyResponse = await this.getFlowIdsFromObjectConditions.search({ + databaseConnection, models, flowObjectsConditions: flowObjectConditionsMap, }); - flowIDsFromObjectFilters.push(...flowIDsFromObjectStrategy.flowIDs); + flowsFromObjectFilters.push(...flows); } - // We need to have two conditions, one for the search and one for the count - // 'countConditions' => Apply only filter conditions but not cursor conditions - // 'searchConditions' => Apply both filter and cursor conditions - const { countConditions, searchConditions } = this.buildConditions( - { - isFilterByFlowObjects, - isFilterByCategory, - shortcutFilter, - }, - { - flowIDsFromCategoryFilters, - flowIDsFromObjectFilters, - flowFilters, - cursorCondition, - } - ); - - let rawOrderBy: string | undefined; - let orderByFlow: - | { - column: any; - order: any; - } - | undefined; - if (isSortByEntity) { - rawOrderBy = `array_position(ARRAY[${sortByFlowIDs.join(',')}], "id")`; - } else { - orderByFlow = mapFlowOrderBy(orderBy); - } + // Lastly, we need to check if we need to filter by flow + // And if we didn't did it before when sorting by entity + // if so, we need to obtain the flowIDs from the flowFilters + const isFilterByFlow = flowFilters !== undefined; - const [flows, countRes] = await Promise.all([ - this.flowService.getFlows( + const flowsFromFlowFilters: UniqueFlowEntity[] = []; + if (isSortByEntity && isFilterByFlow) { + const flowConditions = prepareFlowConditions(flowFilters); + const flows = await this.flowService.getFlows({ models, - searchConditions, - orderByFlow, - limit, - rawOrderBy - ), - this.flowService.getFlowsCount(models, countConditions), - ]); - - // Map count result query to count object - const countObject = mapCountResultToCountObject(countRes); + conditions: flowConditions, + orderBy: { column: orderBy.column, order: orderBy.order }, + }); + for (const flow of flows) { + flowsFromFlowFilters.push({ id: flow.id, versionID: flow.versionID }); + } + } - return { flows, count: countObject.count }; - } + // We need to intersect the flowIDs from the flowObjects, flowCategoryFilters and flowFilters + // to obtain the flowIDs that match all the filters + const deduplicatedFlows: UniqueFlowEntity[] = intersectUniqueFlowEntities( + flowsFromCategoryFilters, + 
flowsFromObjectFilters, + flowsFromFlowFilters, + sortByFlowIDs + ); - buildConditions( - decisionArgs: { - isFilterByFlowObjects: boolean; - isFilterByCategory: boolean; - shortcutFilter: any | null; - }, - filterArgs: { - flowIDsFromCategoryFilters: FlowId[]; - flowIDsFromObjectFilters: FlowId[]; - flowFilters: any | undefined; - cursorCondition: any | undefined; - } - ): { countConditions: any; searchConditions: any } { - const { isFilterByFlowObjects, isFilterByCategory, shortcutFilter } = - decisionArgs; - const { - flowIDsFromCategoryFilters, - flowIDsFromObjectFilters, - flowFilters, - cursorCondition, - } = filterArgs; - let countConditions: any = {}; - let searchConditions: any = {}; + // Obtain the count of the flows that match the filters + const count = deduplicatedFlows.length; - // Check if we have flowIDs from flowObjects and flowCategoryFilters - // if so, we need to filter by those flowIDs - if ((isFilterByFlowObjects || isFilterByCategory) && shortcutFilter) { - const deduplicatedFlowIDs = [...new Set(flowIDsFromCategoryFilters)]; + // After obtaining the count, we need to obtain the flows + // that match the filters + // First we are going to sort the deduplicated flows + // using the sortByFlowIDs if there are any + const sortedFlows: UniqueFlowEntity[] = sortEntitiesByReferenceList( + deduplicatedFlows, + sortByFlowIDs + ); - searchConditions = { - ...searchConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: deduplicatedFlowIDs, - }, - }, - ], - }; - countConditions = { - ...countConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: deduplicatedFlowIDs, - }, - }, - ], - }; - } else if (isFilterByFlowObjects) { - searchConditions = { - ...searchConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: flowIDsFromObjectFilters, - }, - }, - ], - }; - countConditions = { - ...countConditions, - [Cond.AND]: [ - { - id: { - [Op.IN]: flowIDsFromObjectFilters, - }, - }, - ], - }; - } else if (isFilterByCategory || shortcutFilter) { - const idCondition = shortcutFilter ? shortcutFilter.operation : Op.IN; + // Then we are going to slice the flows using the limit and offset + const reducedFlows: UniqueFlowEntity[] = sortedFlows.slice( + offset, + offset! + limit! + ); - searchConditions = { - ...searchConditions, - [Cond.AND]: [ - { - id: { - [idCondition]: flowIDsFromCategoryFilters, - }, - }, - ], - }; + // Once the list of elements is reduced, we need to build the conditions + const searchConditions = this.buildConditions(reducedFlows); - countConditions = { - ...countConditions, - [Cond.AND]: [ - { - id: { - idCondition: flowIDsFromCategoryFilters, - }, - }, - ], - }; - } + const flows = await this.flowService.getFlows({ + models, + conditions: searchConditions, + limit, + orderBy: { column: orderBy.column, order: orderBy.order }, + }); - // After adding the where clauses form the filters - // we need to add the conditions from the flow entity filters - // if there are any - if (flowFilters) { - // Map flowConditions to where clause - const flowConditions = prepareFlowConditions(flowFilters); + return { flows, count }; + } - // Combine conditions from flowObjects FlowIDs and flow conditions - countConditions = { - ...countConditions, - [Cond.AND]: [flowConditions ?? 
{}], - }; + buildConditions(uniqueFlowEntities: UniqueFlowEntity[]): any { + const whereClauses = uniqueFlowEntities.map((flow) => ({ + [Cond.AND]: [{ id: flow.id }, { versionID: flow.versionID }], + })); - // Combine cursor condition with flow conditions - searchConditions = { - ...searchConditions, - ...cursorCondition, - [Cond.AND]: [flowConditions ?? {}], - }; - } - return { countConditions, searchConditions }; + return { + [Cond.OR]: whereClauses, + }; } } diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index fe4e4a5b..3231cc8e 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -5,6 +5,7 @@ import { type FlowObjectFilters, type SearchFlowsFilters, } from '../../graphql/args'; +import { UniqueFlowEntity } from '../../model'; /* * Map structure: @@ -42,18 +43,10 @@ export function mapFlowObjectConditionsToWhereClause( } export function mapFlowCategoryConditionsToWhereClause( - shortcutFilter: any | null, flowCategoryConditions: FlowCategory[] ) { - let whereClause = {}; - - const shortcutsWhereClause = shortcutFilter ? shortcutFilter.where : null; - if (shortcutsWhereClause) { - whereClause = { - [Cond.OR]: [shortcutsWhereClause], - }; - } if (flowCategoryConditions.length > 0) { + let whereClause = {}; // Map category filters // getting Id when possible // or name and group otherwise @@ -102,9 +95,10 @@ export function mapFlowCategoryConditionsToWhereClause( ], }; } + return whereClause; } - return whereClause; + return undefined; } export function mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( @@ -217,3 +211,96 @@ export function mapFlowObjectConditions( return flowObjectsConditions; } + +export function mergeUniqueEntities( + listA: UniqueFlowEntity[], + listB: UniqueFlowEntity[] +): UniqueFlowEntity[] { + const entityMap = new Map(); + + for (const entity of listA.concat(listB)) { + const key = `${entity.id}_${entity.versionID}`; + if (!entityMap.has(key)) { + entityMap.set(key, entity); + } + } + + return Array.from(entityMap.values()); +} + +export function intersectUniqueFlowEntities( + ...lists: UniqueFlowEntity[][] +): UniqueFlowEntity[] { + // If any of the lists is empty, remove it + lists = lists.filter((list) => list.length > 0); + + if (lists.length === 0) return []; + + if (lists.length === 1) return lists[0]; + + // Helper function to create a string key for comparison + const createKey = (entity: UniqueFlowEntity) => + `${entity.id}_${entity.versionID}`; + + // Convert the first list into a set for efficient lookup + const initialSet = new Set(lists[0].map(createKey)); + + // Intersect the remaining lists with the initial set + for (let i = 1; i < lists.length; i++) { + const currentSet = new Set(lists[i].map(createKey)); + for (let key of initialSet) { + if (!currentSet.has(key)) { + initialSet.delete(key); + } + } + } + + // Convert the keys back to UniqueFlowEntity objects + return Array.from(initialSet).map((key) => { + const [id, versionID] = key.split('_').map(Number); + return { id, versionID } as UniqueFlowEntity; + }); +} + +export function sortEntitiesByReferenceList( + entities: UniqueFlowEntity[], + referenceList: UniqueFlowEntity[] +): UniqueFlowEntity[] { + // Create a map for quick lookup of index positions in referenceList + const indexMap = new Map(); + referenceList.forEach((entity, index) => { + const key = `${entity.id}_${entity.versionID}`; + indexMap.set(key, index); + }); + + // Sort the entities array based 
on the order in referenceList + return entities.sort((a, b) => { + const keyA = `${a.id}_${a.versionID}`; + const keyB = `${b.id}_${b.versionID}`; + const indexA = indexMap.get(keyA); + const indexB = indexMap.get(keyB); + + if (indexA !== undefined && indexB !== undefined) { + return indexA - indexB; + } else if (indexA !== undefined) { + return -1; // Prefer elements found in referenceList + } else { + return 1; + } + }); +} + +export function removeDuplicatesUniqueFlowEntities( + entities: UniqueFlowEntity[] +): UniqueFlowEntity[] { + const uniqueEntities = new Map(); + + entities.forEach((entity) => { + const key = `${entity.id}_${entity.versionID}`; + if (!uniqueEntities.has(key)) { + uniqueEntities.set(key, entity); + } + }); + + return Array.from(uniqueEntities.values()); +} diff --git a/src/utils/graphql/pagination.ts b/src/utils/graphql/pagination.ts index 4d77af65..99bfdd44 100644 --- a/src/utils/graphql/pagination.ts +++ b/src/utils/graphql/pagination.ts @@ -1,6 +1,6 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; -import { ArgsType, Field, ObjectType } from 'type-graphql'; +import { ArgsType, Field, Int, ObjectType } from 'type-graphql'; export type SortOrder = 'asc' | 'desc'; @@ -16,11 +16,11 @@ export class PageInfo { @Field({ nullable: false }) hasPreviousPage: boolean; - @Field({ nullable: false }) - prevPageCursor: string; + @Field(() => Int, { nullable: false }) + prevPageCursor: number; - @Field({ nullable: false }) - nextPageCursor: string; + @Field(() => Int, { nullable: false }) + nextPageCursor: number; @Field({ nullable: false }) pageSize: number; @@ -70,11 +70,11 @@ export class PaginationArgs { @Field({ nullable: false }) limit: number; - @Field({ nullable: true }) - nextPageCursor: string; + @Field(() => Int, { nullable: true }) + nextPageCursor: number; - @Field({ nullable: true }) - prevPageCursor: string; + @Field(() => Int, { nullable: true }) + prevPageCursor: number; @Field(() => String, { nullable: true }) sortField: TSortFields; From 7d6ca427ef12d8012a0ffb341c505906067687d5 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Wed, 10 Jan 2024 10:08:43 +0100 Subject: [PATCH 56/67] Add all flowObject properties --- src/domain-services/flows/model.ts | 1 - src/domain-services/location/graphql/types.ts | 44 ++++++++++++++++--- .../location/location-service.ts | 11 ++++- .../organizations/graphql/types.ts | 31 ++++++++++++- .../organizations/organization-service.ts | 9 ++++ src/domain-services/plans/graphql/types.ts | 42 ++++++++++++++++-- src/domain-services/plans/plan-service.ts | 13 ++++++ 7 files changed, 137 insertions(+), 14 deletions(-) diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index 03400739..b253fb62 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -1,7 +1,6 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; -import Knex from 'knex'; import { type SortOrder } from '../../utils/graphql/pagination'; export type FlowEntity = InstanceDataOfModel; diff --git a/src/domain-services/location/graphql/types.ts b/src/domain-services/location/graphql/types.ts index 27f9ecec..7d42abec 100644 --- a/src/domain-services/location/graphql/types.ts +++ b/src/domain-services/location/graphql/types.ts @@ -1,6 +1,6 @@ import { Brand 
} from '@unocha/hpc-api-core/src/util/types'; import { MaxLength } from 'class-validator'; -import { Field, ID, Int, ObjectType, registerEnumType } from 'type-graphql'; +import { Field, ID, ObjectType, registerEnumType } from 'type-graphql'; import { BaseType } from '../../../utils/graphql/base-types'; export enum LocationStatus { @@ -25,7 +25,7 @@ export default class Location extends BaseType { @MaxLength(255) name?: string; - @Field(() => Int) + @Field(() => Number) adminLevel: number; // Accidentally optional @Field({ nullable: true }) @@ -34,7 +34,7 @@ export default class Location extends BaseType { @Field({ nullable: true }) longitude?: number; - @Field(() => Int, { nullable: true }) + @Field(() => Number, { nullable: true }) parentId?: number; @Field({ nullable: true }) @@ -47,7 +47,7 @@ export default class Location extends BaseType { @Field(() => LocationStatus) status?: LocationStatus; // Accidentally optional - @Field(() => Int, { nullable: true }) + @Field(() => Number, { nullable: true }) validOn?: number; @Field({ defaultValue: true }) @@ -56,12 +56,42 @@ export default class Location extends BaseType { @ObjectType() export class BaseLocation extends BaseType { - @Field({ nullable: false }) + @Field({ nullable: true }) id: number; - @Field(() => String, { nullable: false }) + @Field(() => String, { nullable: true }) name: string | null; - @Field({ nullable: false }) + @Field({ nullable: true }) direction: string; + + @Field({ nullable: true }) + externalId?: string; + + @Field(() => Number) + adminLevel: number | null; // Accidentally optional + + @Field(() => Number, { nullable: true }) + latitude: number | null; + + @Field(() => Number, { nullable: true }) + longitude: number | null; + + @Field(() => Number, { nullable: true }) + parentId: number | null; + + @Field(() => String, { nullable: true }) + iso3: string | null; + + @Field(() => String, { nullable: true }) + pcode: string | null; + + @Field(() => String) + status: string | null; // Accidentally optional + + @Field(() => Number, { nullable: true }) + validOn: string | number | null; + + @Field({ defaultValue: true }) + itosSync: boolean; // Accidentally optional } diff --git a/src/domain-services/location/location-service.ts b/src/domain-services/location/location-service.ts index c571ac9c..0261ddd1 100644 --- a/src/domain-services/location/location-service.ts +++ b/src/domain-services/location/location-service.ts @@ -78,13 +78,22 @@ export class LocationService { private mapLocationsToFlowLocations( location: InstanceDataOfModel, locationFO: InstanceDataOfModel - ) { + ): BaseLocation { return { id: location.id, name: location.name, direction: locationFO.refDirection, createdAt: location.createdAt.toISOString(), updatedAt: location.updatedAt.toISOString(), + adminLevel: location.adminLevel, + latitude: location.latitude, + longitude: location.longitude, + parentId: location.parentId, + iso3: location.iso3, + status: location.status, + validOn: location.validOn, + itosSync: location.itosSync, + pcode: location.pcode, }; } } diff --git a/src/domain-services/organizations/graphql/types.ts b/src/domain-services/organizations/graphql/types.ts index c4ebdb23..4e592032 100644 --- a/src/domain-services/organizations/graphql/types.ts +++ b/src/domain-services/organizations/graphql/types.ts @@ -7,11 +7,38 @@ export class Organization extends BaseType { id: number; @Field({ nullable: true }) - direction: string; + name: string; @Field({ nullable: true }) - name: string; + direction: string; @Field({ nullable: true }) 
abbreviation: string; + + @Field({ nullable: true }) + url: string; + + @Field({ nullable: true }) + parentID: number; + + @Field({ nullable: true }) + nativeName: string; + + @Field({ nullable: true }) + comments: string; + + @Field({ nullable: true }) + collectiveInd: string; + + @Field({ nullable: true }) + active: boolean; + + @Field({ nullable: true }) + newOrganisationId: number; + + @Field({ nullable: true }) + verified: boolean; + + @Field({ nullable: true }) + notes: string; } diff --git a/src/domain-services/organizations/organization-service.ts b/src/domain-services/organizations/organization-service.ts index f235a918..17bc6cc5 100644 --- a/src/domain-services/organizations/organization-service.ts +++ b/src/domain-services/organizations/organization-service.ts @@ -59,6 +59,15 @@ export class OrganizationService { createdAt: organization.createdAt.toISOString(), updatedAt: organization.updatedAt.toISOString(), abbreviation: organization.abbreviation, + url: organization.url, + parentID: organization.parentID, + nativeName: organization.nativeName, + comments: organization.comments, + collectiveInd: organization.collectiveInd, + active: organization.active, + newOrganisationId: organization.newOrganisationId, + verified: organization.verified, + notes: organization.notes, }; } } diff --git a/src/domain-services/plans/graphql/types.ts b/src/domain-services/plans/graphql/types.ts index 9a6fe4fc..c12b7760 100644 --- a/src/domain-services/plans/graphql/types.ts +++ b/src/domain-services/plans/graphql/types.ts @@ -99,12 +99,48 @@ export default class Plan { @ObjectType() export class BasePlan extends BaseType { - @Field({ nullable: false }) + @Field({ nullable: true }) id: number; - @Field({ nullable: false }) + @Field({ nullable: true }) name: string; - @Field({ nullable: false }) + @Field({ nullable: true }) direction: string; + + @Field({ nullable: true }) + startDate: string; + + @Field({ nullable: true }) + endDate: string; + + @Field(() => String, { nullable: true }) + comments: string | null; + + @Field({ nullable: true }) + isForHPCProjects: boolean; + + @Field(() => String, { nullable: true }) + code: string | null; + + @Field(() => String, { nullable: true }) + customLocationCode: string | null; + + @Field(() => Number, { nullable: true }) + currentReportingPeriodId: number | null; + + @Field({ nullable: true }) + currentVersion: boolean; + + @Field({ nullable: true }) + latestVersion: boolean; + + @Field({ nullable: true }) + latestTaggedVersion: boolean; + + @Field(() => Number, { nullable: true }) + lastPublishedReportingPeriodId: number | null; + + @Field(() => String, { nullable: true }) + clusterSelectionType: string | null; } diff --git a/src/domain-services/plans/plan-service.ts b/src/domain-services/plans/plan-service.ts index 2920b8c9..a5476825 100644 --- a/src/domain-services/plans/plan-service.ts +++ b/src/domain-services/plans/plan-service.ts @@ -112,6 +112,19 @@ export class PlanService { createdAt: plan.createdAt.toISOString(), updatedAt: plan.updatedAt.toISOString(), direction: direction ?? 
'', + startDate: planVersion.startDate.toISOString(), + endDate: planVersion.endDate.toISOString(), + comments: planVersion.comments, + isForHPCProjects: planVersion.isForHPCProjects, + code: planVersion.code, + customLocationCode: planVersion.customLocationCode, + currentReportingPeriodId: planVersion.currentReportingPeriodId, + currentVersion: planVersion.currentVersion, + latestVersion: planVersion.latestVersion, + latestTaggedVersion: planVersion.latestTaggedVersion, + lastPublishedReportingPeriodId: + planVersion.lastPublishedReportingPeriodId, + clusterSelectionType: planVersion.clusterSelectionType, }; } } From 4d5719eefdf2a8ebae6a674ea84df7f17b16dc07 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Wed, 10 Jan 2024 19:37:22 +0100 Subject: [PATCH 57/67] Temp: performance --- src/domain-services/flows/flow-service.ts | 27 +++++++- src/domain-services/flows/model.ts | 4 +- ...-flow-category-conditions-strategy-impl.ts | 60 ------------------ .../search-flow-by-filters-strategy-impl.ts | 61 ++++++++++++------- .../flows/strategy/impl/utils.ts | 25 ++++++++ 5 files changed, 93 insertions(+), 84 deletions(-) diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index ca3f6982..f512a842 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -4,6 +4,7 @@ import { Service } from 'typedi'; import { type FlowObjectType } from '../flow-object/model'; import { GetFlowsArgs, UniqueFlowEntity, type FlowOrderBy } from './model'; import { + applySearchFilters, mapFlowOrderBy, removeDuplicatesUniqueFlowEntities, } from './strategy/impl/utils'; @@ -15,7 +16,7 @@ export class FlowService { async getFlows(args: GetFlowsArgs) { const { models, conditions, offset, orderBy, limit } = args; - return await models.flow.find({ + return await models!.flow.find({ orderBy, limit, where: conditions, @@ -23,6 +24,30 @@ export class FlowService { }); } + async getFlowsAsUniqueFlowEntity(args: GetFlowsArgs): Promise { + const { databaseConnection, orderBy, conditions } = args; + + let query = databaseConnection!.queryBuilder() + .distinct('id', 'versionID', orderBy.column) // Include orderBy.column in the distinct selection + .select('id', 'versionID') + .from('flow') + .whereNull('deletedAt') + .orderBy(orderBy.column, orderBy.order); + + if(conditions) { + query = applySearchFilters(query, conditions); + } + + const flows = await query; + + const mapFlowsToUniqueFlowEntities = flows.map((flow) => ({ + id: flow.id, + versionID: flow.versionID, + })); + + return mapFlowsToUniqueFlowEntities; + } + async getFlowsCount(models: Database, conditions: any) { return await models.flow.count({ where: conditions }); } diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index b253fb62..024fe175 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -2,6 +2,7 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { type SortOrder } from '../../utils/graphql/pagination'; +import Knex from 'knex'; export type FlowEntity = InstanceDataOfModel; @@ -21,7 +22,8 @@ export type FlowOrderBy = { export type FlowNestedDirection = 'source' | 'destination'; export type GetFlowsArgs = { - models: Database; + models?: Database; + databaseConnection?: Knex; conditions: any; offset?: number; orderBy?: any; diff 
--git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts index 36f437bf..7624c42e 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -104,7 +104,6 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl } const flows = await query; - console.log('flows', flows.length); const mapFlows: UniqueFlowEntity[] = flows.map( (flow) => ({ @@ -114,65 +113,6 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl ); return { flows: mapFlows }; - // const whereClauseCategoryRef = { - // [Cond.OR]: [ - // categoriesIds.length > 0 - // ? { - // [Cond.AND]: [ - // { - // categoryID: { - // [Op.IN]: categoriesIds, - // }, - // }, - // { objectType: 'flow' }, - // ], - // } - // : {}, - // categoriesIdsFromShortcutFilterIN.length > 0 - // ? { - // [Cond.AND]: [ - // { - // categoryID: { - // [Op.IN]: categoriesIdsFromShortcutFilterIN, - // }, - // }, - // { objectType: 'flow' }, - // ], - // } - // : {}, - // categoriesIdsFromShortcutFilterNOTIN.length > 0 - // ? { - // [Cond.AND]: [ - // { - // categoryID: { - // [Op.NOT_IN]: categoriesIdsFromShortcutFilterNOTIN, - // }, - // }, - // { objectType: 'flow' }, - // ], - // } - // : {}, - // ], - // }; - - // const categoryRefs = await this.categoryService.findCategoryRefs( - // models, - // whereClauseCategoryRef - // ); - - // Map category refs to flow IDs - // keep only unique values - // and return the list of flow IDs - // const mapFlowsToUniqueFlowEntities = categoryRefs.map((categoryRef) => { - // return { - // id: categoryRef.objectID, - // versionID: categoryRef.versionID, - // } as UniqueFlowEntity; - // }) - - // const flows = removeDuplicatesUniqueFlowEntities(mapFlowsToUniqueFlowEntities); - - // return { flows }; } generateWhereClause( diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index 0ba3cb69..ea65ce14 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -1,8 +1,7 @@ -import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; -import { UniqueFlowEntity } from '../../model'; +import { FlowEntity, UniqueFlowEntity } from '../../model'; import { type FlowSearchArgs, type FlowSearchStrategy, @@ -13,10 +12,8 @@ import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow import { GetFlowIdsFromObjectConditionsStrategyImpl } from './get-flowIds-flow-object-conditions-strategy-impl'; import { intersectUniqueFlowEntities, - mapCountResultToCountObject, mapFlowObjectConditions, mapFlowOrderBy, - mergeUniqueEntities, prepareFlowConditions, sortEntitiesByReferenceList, } from './utils'; @@ -63,30 +60,32 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { // In this case we fetch the list of flows from the database // using the orderBy // We can also filter by flowFilters - const flowConditions = 
prepareFlowConditions(flowFilters); + const orderByForFlow = mapFlowOrderBy(orderBy); - const flowsToSort = await this.flowService.getFlows({ - models, - conditions: flowConditions, - orderBy: { column: orderBy.column, order: orderBy.order }, + const flowsToSort: UniqueFlowEntity[] = await this.flowService.getFlowsAsUniqueFlowEntity({ + databaseConnection, + conditions: flowFilters, + orderBy: orderByForFlow, }); - const flowIDsFromSortingEntity: UniqueFlowEntity[] = flowsToSort.map( - (flow) => ({ id: flow.id, versionID: flow.versionID }) - ); // Since there can be many flowIDs returned // This can cause 'Maximum call stack size exceeded' error // When using the spread operator - a workaround is to use push fot each element - for (const flow of flowIDsFromSortingEntity) { - sortByFlowIDs.push(flow); + // also, we need to map the FlowEntity to UniqueFlowEntity + for (const flow of flowsToSort) { + const uniqueFlowEntity: UniqueFlowEntity = { + id: flow.id, + versionID: flow.versionID, + }; + sortByFlowIDs.push(uniqueFlowEntity); } } // Now we need to check if we need to filter by category // if it's using any of the shorcuts // or if there are any flowCategoryFilters - const isSearchByCategoryShotcut = shortcutFilter !== null; - + const isSearchByCategoryShotcut = shortcutFilter !== null && shortcutFilter.length > 0; + const isFilterByCategory = isSearchByCategoryShotcut || flowCategoryFilters?.length > 0; @@ -101,6 +100,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { shortcutFilter, flowObjectsConditions: undefined, }); + // Since there can be many flowIDs returned // This can cause 'Maximum call stack size exceeded' error // When using the spread operator - a workaround is to use push fot each element @@ -123,7 +123,13 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { models, flowObjectsConditions: flowObjectConditionsMap, }); - flowsFromObjectFilters.push(...flows); + + // Since there can be many flowIDs returned + // This can cause 'Maximum call stack size exceeded' error + // When using the spread operator - a workaround is to use push fot each element + for (const flow of flows) { + flowsFromObjectFilters.push(flow); + } } // Lastly, we need to check if we need to filter by flow @@ -134,13 +140,22 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { const flowsFromFlowFilters: UniqueFlowEntity[] = []; if (isSortByEntity && isFilterByFlow) { const flowConditions = prepareFlowConditions(flowFilters); - const flows = await this.flowService.getFlows({ + const flows: FlowEntity[] = await this.flowService.getFlows({ models, conditions: flowConditions, orderBy: { column: orderBy.column, order: orderBy.order }, }); + + // Since there can be many flowIDs returned + // This can cause 'Maximum call stack size exceeded' error + // When using the spread operator - a workaround is to use push fot each element + // also, we need to map the FlowEntity to UniqueFlowEntity for (const flow of flows) { - flowsFromFlowFilters.push({ id: flow.id, versionID: flow.versionID }); + const uniqueFlowEntity: UniqueFlowEntity = { + id: flow.id, + versionID: flow.versionID, + }; + sortByFlowIDs.push(uniqueFlowEntity); } } @@ -165,6 +180,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { sortByFlowIDs ); + // Then we are going to slice the flows using the limit and offset const reducedFlows: UniqueFlowEntity[] = sortedFlows.slice( offset, @@ -174,11 +190,12 @@ export class SearchFlowByFiltersStrategy implements 
FlowSearchStrategy { // Once the list of elements is reduced, we need to build the conditions const searchConditions = this.buildConditions(reducedFlows); + const orderByForFlow = mapFlowOrderBy(orderBy); + const flows = await this.flowService.getFlows({ models, conditions: searchConditions, - limit, - orderBy: { column: orderBy.column, order: orderBy.order }, + orderBy: orderByForFlow, }); return { flows, count }; diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index 3231cc8e..72c8603a 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -6,6 +6,7 @@ import { type SearchFlowsFilters, } from '../../graphql/args'; import { UniqueFlowEntity } from '../../model'; +import Knex from 'knex'; /* * Map structure: @@ -304,3 +305,27 @@ export function removeDuplicatesUniqueFlowEntities( return Array.from(uniqueEntities.values()); } + +export function applySearchFilters(query: Knex.QueryBuilder, filters: SearchFlowsFilters): Knex.QueryBuilder { + // Check if 'id' filter is defined and apply it + if (filters.id !== null && filters.id !== undefined) { + query.whereIn('id', filters.id); + } + + // Check if 'activeStatus' filter is defined and apply it + if (filters.activeStatus !== null && filters.activeStatus !== undefined) { + query.andWhere('activeStatus', filters.activeStatus); + } + + // Check if 'amountUSD' filter is defined and apply it + if (filters.amountUSD !== null && filters.amountUSD !== undefined) { + query.andWhere('amountUSD', filters.amountUSD); + } + + // Check if 'restricted' filter is defined and apply it + if (filters.restricted !== null && filters.restricted !== undefined) { + query.andWhere('restricted', filters.restricted); + } + + return query; +} \ No newline at end of file From 2bfa08c3331f050822e632e3a2fa238b404b3cfd Mon Sep 17 00:00:00 2001 From: manelcecs Date: Wed, 17 Jan 2024 12:05:00 +0100 Subject: [PATCH 58/67] Allow filtering by no-flowObject properties --- .../flows/flow-search-service.ts | 17 ++++- src/domain-services/flows/flow-service.ts | 27 +++++--- src/domain-services/flows/graphql/args.ts | 31 +++++---- src/domain-services/flows/graphql/resolver.ts | 1 - src/domain-services/flows/model.ts | 2 +- .../flows/strategy/flow-search-strategy.ts | 2 + .../flows/strategy/flowID-search-strategy.ts | 14 ++-- ...-flow-category-conditions-strategy-impl.ts | 30 +++----- ...-from-nested-flow-filters-strategy-impl.ts | 69 +++++++++++++++++++ ...ds-flow-object-conditions-strategy-impl.ts | 12 +--- .../search-flow-by-filters-strategy-impl.ts | 47 ++++++++++--- .../flows/strategy/impl/utils.ts | 42 ++++++----- .../report-details/report-detail-service.ts | 55 +++++++++++++++ tests/unit/flow-search-service.spec.ts | 5 -- 14 files changed, 249 insertions(+), 105 deletions(-) create mode 100644 src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 1c34d314..b96599ce 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -24,6 +24,7 @@ import { SearchFlowsFilters, type FlowCategory, type FlowObjectFilters, + type NestedFlowFilters, type SearchFlowsArgs, type SearchFlowsArgsNonPaginated, } from './graphql/args'; @@ -72,6 +73,7 @@ export class FlowSearchService { sortField, sortOrder, shouldIncludeChildrenOfParkedFlows, + 
nestedFlowFilters, } = filters; const orderBy: FlowOrderBy = this.buildOrderBy(sortField, sortOrder); @@ -110,6 +112,7 @@ export class FlowSearchService { flowFilters, flowObjectFilters, flowCategoryFilters, + nestedFlowFilters, shortcutFilter, orderBy ); @@ -132,6 +135,7 @@ export class FlowSearchService { flowFilters, flowObjectFilters, flowCategoryFilters, + nestedFlowFilters, // shortcuts for categories shortcutFilter, }); @@ -353,10 +357,11 @@ export class FlowSearchService { flowFilters: SearchFlowsFilters, flowObjectFilters: FlowObjectFilters[], flowCategoryFilters: FlowCategory[], + nestedFlowFilters: NestedFlowFilters, shortcutFilter: any | null, orderBy?: FlowOrderBy ) { - // If there are no filters (flowFilters, flowObjectFilters, flowCategoryFilters or pending) + // If there are no filters (flowFilters, flowObjectFilters, flowCategoryFilters, nestedFlowFilters or shortcutFilter) // and there is no sortByEntity (orderBy.entity === 'flow') // use onlyFlowFiltersStrategy // If there are no sortByEntity (orderBy.entity === 'flow') @@ -366,6 +371,7 @@ export class FlowSearchService { const isFlowFiltersDefined = flowFilters !== undefined; const isFlowObjectFiltersDefined = flowObjectFilters !== undefined; const isFlowCategoryFiltersDefined = flowCategoryFilters !== undefined; + const isNestedFlowFiltersDefined = nestedFlowFilters !== undefined; // Shortcuts fot categories const isFilterByShortcutsDefined = shortcutFilter !== null; @@ -373,13 +379,15 @@ export class FlowSearchService { !isFlowFiltersDefined && !isFlowObjectFiltersDefined && !isFlowCategoryFiltersDefined && - !isFilterByShortcutsDefined; + !isFilterByShortcutsDefined && + !isNestedFlowFiltersDefined; const isFlowFiltersOnly = isFlowFiltersDefined && !isFlowObjectFiltersDefined && !isFlowCategoryFiltersDefined && - !isFilterByShortcutsDefined; + !isFilterByShortcutsDefined && + !isNestedFlowFiltersDefined; if (isOrderByEntityFlow && (isNoFilterDefined || isFlowFiltersOnly)) { // Use onlyFlowFiltersStrategy @@ -761,6 +769,7 @@ export class FlowSearchService { parked: isParkedFlows, pass_through: isPassThroughFlows, standard: isStandardFlows, + nestedFlowFilters, } = args; if (!flowFilters) { @@ -794,6 +803,7 @@ export class FlowSearchService { flowFilters, flowObjectFilters, flowCategoryFilters, + nestedFlowFilters, shortcutFilter ); @@ -803,6 +813,7 @@ export class FlowSearchService { flowFilters, flowObjectFilters, flowCategoryFilters, + nestedFlowFilters, // shortcuts for categories shortcutFilter, }); diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index f512a842..42202a6a 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -2,7 +2,11 @@ import { type Database } from '@unocha/hpc-api-core/src/db/type'; import type Knex from 'knex'; import { Service } from 'typedi'; import { type FlowObjectType } from '../flow-object/model'; -import { GetFlowsArgs, UniqueFlowEntity, type FlowOrderBy } from './model'; +import { + type FlowOrderBy, + type GetFlowsArgs, + type UniqueFlowEntity, +} from './model'; import { applySearchFilters, mapFlowOrderBy, @@ -24,17 +28,20 @@ export class FlowService { }); } - async getFlowsAsUniqueFlowEntity(args: GetFlowsArgs): Promise { + async getFlowsAsUniqueFlowEntity( + args: GetFlowsArgs + ): Promise { const { databaseConnection, orderBy, conditions } = args; - let query = databaseConnection!.queryBuilder() - .distinct('id', 'versionID', orderBy.column) // Include orderBy.column in the 
distinct selection - .select('id', 'versionID') - .from('flow') - .whereNull('deletedAt') - .orderBy(orderBy.column, orderBy.order); - - if(conditions) { + let query = databaseConnection! + .queryBuilder() + .distinct('id', 'versionID', orderBy.column) // Include orderBy.column in the distinct selection + .select('id', 'versionID') + .from('flow') + .whereNull('deletedAt') + .orderBy(orderBy.column, orderBy.order); + + if (conditions) { query = applySearchFilters(query, conditions); } diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 02bb4662..c90b611d 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -10,26 +10,25 @@ export class SearchFlowsFilters { @Field(() => Boolean, { nullable: true }) activeStatus: boolean | null; - @Field(() => String, { nullable: true }) - status: 'commitment' | 'paid' | 'pledged' | null; - - @Field(() => String, { nullable: true }) - type: 'carryover' | 'parked' | 'pass_through' | 'standard' | null; - @Field(() => Number, { nullable: true }) amountUSD: number | null; - @Field(() => Number, { name: 'reporterRefCode', nullable: true }) - reporterReferenceCode: number | null; + @Field(() => Boolean, { nullable: true }) + restricted: boolean | null; + + constructor() {} +} + +@InputType() +export class NestedFlowFilters { + @Field(() => String, { name: 'reporterRefCode', nullable: true }) + reporterReferenceCodes: string[] | null; @Field(() => Number, { name: 'sourceSystemID', nullable: true }) - sourceSystemId: number | null; + sourceIDs: string[] | null; @Field(() => Number, { name: 'legacyID', nullable: true }) - legacyId: number | null; - - @Field(() => Boolean, { nullable: true }) - restricted: boolean | null; + legacyId: number[] | null; constructor() {} } @@ -80,6 +79,9 @@ export class SearchFlowsArgs extends PaginationArgs { @Field(() => [FlowObjectFilters], { nullable: true }) flowObjectFilters: FlowObjectFilters[]; + @Field(() => NestedFlowFilters, { nullable: true }) + nestedFlowFilters: NestedFlowFilters; + @Field({ name: 'includeChildrenOfParkedFlows', nullable: true }) shouldIncludeChildrenOfParkedFlows: boolean; @@ -119,6 +121,9 @@ export class SearchFlowsArgsNonPaginated { @Field(() => [FlowObjectFilters], { nullable: true }) flowObjectFilters: FlowObjectFilters[]; + @Field(() => NestedFlowFilters, { nullable: true }) + nestedFlowFilters: NestedFlowFilters; + @Field({ name: 'includeChildrenOfParkedFlows', nullable: true }) shouldIncludeChildrenOfParkedFlows: boolean; diff --git a/src/domain-services/flows/graphql/resolver.ts b/src/domain-services/flows/graphql/resolver.ts index 4f3e1df2..78c4d9d5 100644 --- a/src/domain-services/flows/graphql/resolver.ts +++ b/src/domain-services/flows/graphql/resolver.ts @@ -4,7 +4,6 @@ import Context from '../../Context'; import { FlowSearchService } from '../flow-search-service'; import { SearchFlowsArgs, SearchFlowsArgsNonPaginated } from './args'; import { - BaseFlow, Flow, FlowSearchResult, FlowSearchResultNonPaginated, diff --git a/src/domain-services/flows/model.ts b/src/domain-services/flows/model.ts index 024fe175..b3b3e1e7 100644 --- a/src/domain-services/flows/model.ts +++ b/src/domain-services/flows/model.ts @@ -1,8 +1,8 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import type Knex from 'knex'; import { type SortOrder } 
from '../../utils/graphql/pagination'; -import Knex from 'knex'; export type FlowEntity = InstanceDataOfModel; diff --git a/src/domain-services/flows/strategy/flow-search-strategy.ts b/src/domain-services/flows/strategy/flow-search-strategy.ts index 127eef67..191dfc99 100644 --- a/src/domain-services/flows/strategy/flow-search-strategy.ts +++ b/src/domain-services/flows/strategy/flow-search-strategy.ts @@ -3,6 +3,7 @@ import type Knex from 'knex'; import { type FlowCategory, type FlowObjectFilters, + type NestedFlowFilters, type SearchFlowsFilters, } from '../graphql/args'; import { type FlowEntity } from '../model'; @@ -18,6 +19,7 @@ export interface FlowSearchArgs { flowFilters: SearchFlowsFilters; flowObjectFilters: FlowObjectFilters[]; flowCategoryFilters: FlowCategory[]; + nestedFlowFilters: NestedFlowFilters; shortcutFilter: any; limit?: number; offset?: number; diff --git a/src/domain-services/flows/strategy/flowID-search-strategy.ts b/src/domain-services/flows/strategy/flowID-search-strategy.ts index b219299c..2770715e 100644 --- a/src/domain-services/flows/strategy/flowID-search-strategy.ts +++ b/src/domain-services/flows/strategy/flowID-search-strategy.ts @@ -1,7 +1,6 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; -import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import Knex from 'knex'; -import { type FlowCategory } from '../graphql/args'; +import type Knex from 'knex'; +import { type FlowCategory, type NestedFlowFilters } from '../graphql/args'; import { type UniqueFlowEntity } from '../model'; export interface FlowIdSearchStrategyResponse { @@ -9,19 +8,14 @@ export interface FlowIdSearchStrategyResponse { } export interface FlowIdSearchStrategyArgs { - databaseConnection: Knex; + databaseConnection?: Knex; models: Database; flowObjectsConditions?: Map>; flowCategoryConditions?: FlowCategory[]; + nestedFlowFilters?: NestedFlowFilters; shortcutFilter?: any[] | null; } export interface FlowIDSearchStrategy { search(args: FlowIdSearchStrategyArgs): Promise; - - generateWhereClause( - flowIds: FlowId[], - conditions: any, - filterByPendingFlows?: boolean - ): any; } diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts index 7624c42e..da9efa22 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-category-conditions-strategy-impl.ts @@ -1,19 +1,15 @@ import { type CategoryId } from '@unocha/hpc-api-core/src/db/models/category'; -import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { CategoryService } from '../../../categories/category-service'; -import { UniqueFlowEntity } from '../../model'; +import { type UniqueFlowEntity } from '../../model'; import { type FlowIDSearchStrategy, type FlowIdSearchStrategyArgs, type FlowIdSearchStrategyResponse, } from '../flowID-search-strategy'; -import { - mapFlowCategoryConditionsToWhereClause, - removeDuplicatesUniqueFlowEntities, -} from './utils'; +import { mapFlowCategoryConditionsToWhereClause } from './utils'; @Service() export class 
GetFlowIdsFromCategoryConditionsStrategyImpl @@ -21,7 +17,10 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl { constructor(private readonly categoryService: CategoryService) {} - private readonly categoryIDsMap: Map = new Map([ + private readonly categoryIDsMap: Map = new Map< + string, + number + >([ ['Pending', 45], ['Pledge', 46], ['Commitment', 47], @@ -79,7 +78,7 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl } } - let query = databaseConnection + let query = databaseConnection! .queryBuilder() .distinct('objectID', 'versionID') .select('objectID', 'versionID') @@ -114,17 +113,4 @@ export class GetFlowIdsFromCategoryConditionsStrategyImpl return { flows: mapFlows }; } - - generateWhereClause( - flowIds: FlowId[], - _conditions: any, - filterByPendingFlows: boolean - ) { - const operation = filterByPendingFlows === true ? Op.IN : Op.NOT_IN; - return { - id: { - [operation]: flowIds, - }, - }; - } } diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts new file mode 100644 index 00000000..1adb91d7 --- /dev/null +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts @@ -0,0 +1,69 @@ +import { Service } from 'typedi'; +import { ReportDetailService } from '../../../report-details/report-detail-service'; +import { type UniqueFlowEntity } from '../../model'; +import { + type FlowIDSearchStrategy, + type FlowIdSearchStrategyArgs, + type FlowIdSearchStrategyResponse, +} from '../flowID-search-strategy'; +import { intersectUniqueFlowEntities } from './utils'; + +@Service() +export class GetFlowIdsFromNestedFlowFiltersStrategyImpl + implements FlowIDSearchStrategy +{ + constructor(private readonly reportDetailService: ReportDetailService) {} + + async search( + args: FlowIdSearchStrategyArgs + ): Promise { + const { models, nestedFlowFilters } = args; + + let flowsReporterReferenceCode: UniqueFlowEntity[] = []; + let flowsSourceSystemId: UniqueFlowEntity[] = []; + const flowsLegacyId: UniqueFlowEntity[] = []; + + // Get the flowIDs using 'reporterReferenceCode' + if ( + nestedFlowFilters?.reporterReferenceCodes && + nestedFlowFilters?.reporterReferenceCodes.length > 0 + ) { + flowsReporterReferenceCode = + await this.reportDetailService.getUniqueFlowIDsFromReportDetailsByReporterReferenceCode( + models, + nestedFlowFilters.reporterReferenceCodes + ); + } + + // Get the flowIDs using 'sourceSystemID' + if ( + nestedFlowFilters?.sourceIDs && + nestedFlowFilters?.sourceIDs.length > 0 + ) { + flowsSourceSystemId = + await this.reportDetailService.getUniqueFlowIDsFromReportDetailsBySourceID( + models, + nestedFlowFilters.sourceIDs + ); + } + + // TODO: Get the flowIDs using 'legacyID' + // if(nestedFlowFilters?.legacyId) { + // flowsLegacyId = await this.flowService.getFlowIDsFromSourceSystemID( + // models, + // databaseConnection, + // nestedFlowFilters.sourceSystemId + // ); + // } + + // Intersect the flowIDs from the nestedFlowFilters + const flowIDsFromNestedFlowFilters: UniqueFlowEntity[] = + intersectUniqueFlowEntities( + flowsReporterReferenceCode, + flowsSourceSystemId, + flowsLegacyId + ); + + return { flows: flowIDsFromNestedFlowFilters }; + } +} diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts index 
e6d258f7..61a0c7bd 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-object-conditions-strategy-impl.ts @@ -1,8 +1,6 @@ -import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowObjectService } from '../../../flow-object/flow-object-service'; -import { UniqueFlowEntity } from '../../model'; +import { type UniqueFlowEntity } from '../../model'; import { type FlowIDSearchStrategy, type FlowIdSearchStrategyArgs, @@ -37,12 +35,4 @@ export class GetFlowIdsFromObjectConditionsStrategyImpl return { flows: flowsFromFilteredFlowObjects }; } - - generateWhereClause(flowIds: FlowId[]) { - return { - id: { - [Op.IN]: flowIds, - }, - }; - } } diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index ea65ce14..2e7e2641 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -1,7 +1,7 @@ import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; -import { FlowEntity, UniqueFlowEntity } from '../../model'; +import { type FlowEntity, type UniqueFlowEntity } from '../../model'; import { type FlowSearchArgs, type FlowSearchStrategy, @@ -9,6 +9,7 @@ import { } from '../flow-search-strategy'; import { type FlowIdSearchStrategyResponse } from '../flowID-search-strategy'; import { GetFlowIdsFromCategoryConditionsStrategyImpl } from './get-flowIds-flow-category-conditions-strategy-impl'; +import { GetFlowIdsFromNestedFlowFiltersStrategyImpl } from './get-flowIds-flow-from-nested-flow-filters-strategy-impl'; import { GetFlowIdsFromObjectConditionsStrategyImpl } from './get-flowIds-flow-object-conditions-strategy-impl'; import { intersectUniqueFlowEntities, @@ -23,7 +24,8 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { constructor( private readonly flowService: FlowService, private readonly getFlowIdsFromCategoryConditions: GetFlowIdsFromCategoryConditionsStrategyImpl, - private readonly getFlowIdsFromObjectConditions: GetFlowIdsFromObjectConditionsStrategyImpl + private readonly getFlowIdsFromObjectConditions: GetFlowIdsFromObjectConditionsStrategyImpl, + private readonly getFlowIdsFromNestedFlowFilters: GetFlowIdsFromNestedFlowFiltersStrategyImpl ) {} async search(args: FlowSearchArgs): Promise { @@ -33,6 +35,7 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { flowFilters, flowObjectFilters, flowCategoryFilters, + nestedFlowFilters, orderBy, limit, offset, @@ -62,11 +65,12 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { // We can also filter by flowFilters const orderByForFlow = mapFlowOrderBy(orderBy); - const flowsToSort: UniqueFlowEntity[] = await this.flowService.getFlowsAsUniqueFlowEntity({ - databaseConnection, - conditions: flowFilters, - orderBy: orderByForFlow, - }); + const flowsToSort: UniqueFlowEntity[] = + await this.flowService.getFlowsAsUniqueFlowEntity({ + databaseConnection, + conditions: flowFilters, + orderBy: orderByForFlow, + }); // Since there can be many flowIDs returned // This can cause 'Maximum call stack size exceeded' error @@ 
-81,11 +85,32 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { } } + // We need to fetch the flowIDs by the nestedFlowFilters + // if there are any + const isFilterByNestedFilters = nestedFlowFilters !== undefined; + const flowIDsFromNestedFlowFilters: UniqueFlowEntity[] = []; + + if (isFilterByNestedFilters) { + const { flows }: FlowIdSearchStrategyResponse = + await this.getFlowIdsFromNestedFlowFilters.search({ + models, + nestedFlowFilters, + }); + + // Since there can be many flowIDs returned + // This can cause 'Maximum call stack size exceeded' error + // When using the spread operator - a workaround is to use push fot each element + for (const flow of flows) { + flowIDsFromNestedFlowFilters.push(flow); + } + } + // Now we need to check if we need to filter by category // if it's using any of the shorcuts // or if there are any flowCategoryFilters - const isSearchByCategoryShotcut = shortcutFilter !== null && shortcutFilter.length > 0; - + const isSearchByCategoryShotcut = + shortcutFilter !== null && shortcutFilter.length > 0; + const isFilterByCategory = isSearchByCategoryShotcut || flowCategoryFilters?.length > 0; @@ -165,7 +190,8 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { flowsFromCategoryFilters, flowsFromObjectFilters, flowsFromFlowFilters, - sortByFlowIDs + sortByFlowIDs, + flowIDsFromNestedFlowFilters ); // Obtain the count of the flows that match the filters @@ -180,7 +206,6 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { sortByFlowIDs ); - // Then we are going to slice the flows using the limit and offset const reducedFlows: UniqueFlowEntity[] = sortedFlows.slice( offset, diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index 72c8603a..f0a0378e 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -1,12 +1,12 @@ import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Cond, Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import type Knex from 'knex'; import { type FlowCategory, type FlowObjectFilters, type SearchFlowsFilters, } from '../../graphql/args'; -import { UniqueFlowEntity } from '../../model'; -import Knex from 'knex'; +import { type UniqueFlowEntity } from '../../model'; /* * Map structure: @@ -99,7 +99,7 @@ export function mapFlowCategoryConditionsToWhereClause( return whereClause; } - return undefined; + return {}; } export function mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( @@ -219,14 +219,14 @@ export function mergeUniqueEntities( ): UniqueFlowEntity[] { const entityMap = new Map(); - for (const entity of listA.concat(listB)) { + for (const entity of [...listA, ...listB]) { const key = `${entity.id}_${entity.versionID}`; if (!entityMap.has(key)) { entityMap.set(key, entity); } } - return Array.from(entityMap.values()); + return [...entityMap.values()]; } export function intersectUniqueFlowEntities( @@ -235,9 +235,13 @@ export function intersectUniqueFlowEntities( // If any of the lists is empty, remove it lists = lists.filter((list) => list.length > 0); - if (lists.length === 0) return []; + if (lists.length === 0) { + return []; + } - if (lists.length === 1) return lists[0]; + if (lists.length === 1) { + return lists[0]; + } // Helper function to create a string key for comparison const createKey = (entity: UniqueFlowEntity) => @@ -249,7 +253,7 @@ export function intersectUniqueFlowEntities( // 
Intersect the remaining lists with the initial set for (let i = 1; i < lists.length; i++) { const currentSet = new Set(lists[i].map(createKey)); - for (let key of initialSet) { + for (const key of initialSet) { if (!currentSet.has(key)) { initialSet.delete(key); } @@ -257,7 +261,7 @@ export function intersectUniqueFlowEntities( } // Convert the keys back to UniqueFlowEntity objects - return Array.from(initialSet).map((key) => { + return [...initialSet].map((key) => { const [id, versionID] = key.split('_').map(Number); return { id, versionID } as UniqueFlowEntity; }); @@ -269,10 +273,10 @@ export function sortEntitiesByReferenceList( ): UniqueFlowEntity[] { // Create a map for quick lookup of index positions in referenceList const indexMap = new Map(); - referenceList.forEach((entity, index) => { + for (const [index, entity] of referenceList.entries()) { const key = `${entity.id}_${entity.versionID}`; indexMap.set(key, index); - }); + } // Sort the entities array based on the order in referenceList return entities.sort((a, b) => { @@ -285,9 +289,8 @@ export function sortEntitiesByReferenceList( return indexA - indexB; } else if (indexA !== undefined) { return -1; // Prefer elements found in referenceList - } else { - return 1; } + return 1; }); } @@ -296,17 +299,20 @@ export function removeDuplicatesUniqueFlowEntities( ): UniqueFlowEntity[] { const uniqueEntities = new Map(); - entities.forEach((entity) => { + for (const entity of entities) { const key = `${entity.id}_${entity.versionID}`; if (!uniqueEntities.has(key)) { uniqueEntities.set(key, entity); } - }); + } - return Array.from(uniqueEntities.values()); + return [...uniqueEntities.values()]; } -export function applySearchFilters(query: Knex.QueryBuilder, filters: SearchFlowsFilters): Knex.QueryBuilder { +export function applySearchFilters( + query: Knex.QueryBuilder, + filters: SearchFlowsFilters +): Knex.QueryBuilder { // Check if 'id' filter is defined and apply it if (filters.id !== null && filters.id !== undefined) { query.whereIn('id', filters.id); @@ -328,4 +334,4 @@ export function applySearchFilters(query: Knex.QueryBuilder, filters: SearchFlow } return query; -} \ No newline at end of file +} diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index 4c8c0fa5..7ee13b31 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -2,8 +2,10 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; import { type Category } from '../categories/graphql/types'; +import { type UniqueFlowEntity } from '../flows/model'; import { type ReportDetail } from './graphql/types'; @Service() export class ReportDetailService { @@ -81,4 +83,57 @@ export class ReportDetailService { } return reportDetails; } + + async getUniqueFlowIDsFromReportDetailsByReporterReferenceCode( + models: Database, + reporterReferenceCodes: string[] + ): Promise { + const reportDetails: Array> = + await models.reportDetail.find({ + where: { + refCode: { + [Op.IN]: reporterReferenceCodes, + }, + }, + }); + + const flowIDs: UniqueFlowEntity[] = []; + + for (const reportDetail 
of reportDetails) { + flowIDs.push(this.mapReportDetailToUniqueFlowEntity(reportDetail)); + } + + return flowIDs; + } + + async getUniqueFlowIDsFromReportDetailsBySourceID( + models: Database, + sourceIDs: string[] + ): Promise { + const reportDetails: Array> = + await models.reportDetail.find({ + where: { + sourceID: { + [Op.IN]: sourceIDs, + }, + }, + }); + + const flowIDs: UniqueFlowEntity[] = []; + + for (const reportDetail of reportDetails) { + flowIDs.push(this.mapReportDetailToUniqueFlowEntity(reportDetail)); + } + + return flowIDs; + } + + private mapReportDetailToUniqueFlowEntity( + reportDetail: InstanceDataOfModel + ): UniqueFlowEntity { + return { + id: createBrandedValue(reportDetail.flowID), + versionID: reportDetail.versionID, + }; + } } diff --git a/tests/unit/flow-search-service.spec.ts b/tests/unit/flow-search-service.spec.ts index a12e1fa6..a6ecbf4d 100644 --- a/tests/unit/flow-search-service.spec.ts +++ b/tests/unit/flow-search-service.spec.ts @@ -17,12 +17,7 @@ describe('FlowSearchService', () => { const flowFilters = new SearchFlowsFilters(); flowFilters.id = [1]; flowFilters.activeStatus = true; - flowFilters.status = 'commitment'; - flowFilters.type = 'carryover'; flowFilters.amountUSD = 1000; - flowFilters.reporterReferenceCode = 123; - flowFilters.sourceSystemId = 456; - flowFilters.legacyId = 789; const result = flowSearchService.prepareFlowConditions(flowFilters); From 95988e607d6d5be5faaa1e3c97960ca59ab059a4 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Wed, 17 Jan 2024 16:11:00 +0100 Subject: [PATCH 59/67] Change how sub-property filter works We used camelCase to determine subEntities that generate incongruences with properties like 'usageYear' Instead now we use dash '-' to determine nesting --- .../flows/flow-search-service.ts | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index b96599ce..08efe751 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -426,11 +426,11 @@ export class FlowSearchService { orderBy.column = struct[2]; orderBy.direction = struct[1] as FlowNestedDirection; - // We need to look after the capitalized letter - // that will indicate the entity - // and the whole word will be the subEntity - // Capitalized letter will never be the first letter - const entity = this.getSubstringUntilCapital(struct[0]); + // We need to look after the '-' character + // [0] will indicate the entity + // and [1] will be the subEntity + const splitted = struct[0].split('-'); + const entity = splitted[0]; orderBy.entity = entity; if (entity === struct[0]) { @@ -442,15 +442,6 @@ export class FlowSearchService { return orderBy; } - getSubstringUntilCapital(inputString: string): string { - for (let i = 0; i < inputString.length; i++) { - if (inputString[i] === inputString[i].toUpperCase()) { - return inputString.substring(0, i); - } - } - return inputString; // Return inputString if no capital letter is found - } - prepareFlowConditions(flowFilters: SearchFlowsFilters): any { let flowConditions = {}; From 2ea19b31536e47e9f49bdb769e41ef3ed185c3c1 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 18 Jan 2024 09:10:00 +0100 Subject: [PATCH 60/67] Temp: fix validation error. 
Merge with: 2bfa08c3331f050822e632e3a2fa238b404b3cfd --- src/domain-services/flows/graphql/args.ts | 12 ++++-------- .../report-details/report-detail-service.ts | 2 ++ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index c90b611d..5c34b40c 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -15,22 +15,18 @@ export class SearchFlowsFilters { @Field(() => Boolean, { nullable: true }) restricted: boolean | null; - - constructor() {} } @InputType() export class NestedFlowFilters { - @Field(() => String, { name: 'reporterRefCode', nullable: true }) + @Field(() => [String], { nullable: true }) reporterReferenceCodes: string[] | null; - @Field(() => Number, { name: 'sourceSystemID', nullable: true }) + @Field(() => [String], { nullable: true }) sourceIDs: string[] | null; - @Field(() => Number, { name: 'legacyID', nullable: true }) - legacyId: number[] | null; - - constructor() {} + @Field(() => [Number], { nullable: true }) + legacyIDs: number[] | null; } @InputType() diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index 7ee13b31..1b097d6e 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -95,6 +95,7 @@ export class ReportDetailService { [Op.IN]: reporterReferenceCodes, }, }, + skipValidation: true, }); const flowIDs: UniqueFlowEntity[] = []; @@ -117,6 +118,7 @@ export class ReportDetailService { [Op.IN]: sourceIDs, }, }, + skipValidation: true, }); const flowIDs: UniqueFlowEntity[] = []; From 61fc1550b62c3bed94c0eda15fb77e08342f9649 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 25 Jan 2024 12:24:00 +0100 Subject: [PATCH 61/67] Proper usage of nested flow properties filtering --- src/domain-services/flows/graphql/args.ts | 12 ++++----- ...-from-nested-flow-filters-strategy-impl.ts | 15 ++++------- .../report-details/report-detail-service.ts | 27 +++---------------- 3 files changed, 15 insertions(+), 39 deletions(-) diff --git a/src/domain-services/flows/graphql/args.ts b/src/domain-services/flows/graphql/args.ts index 5c34b40c..c10f22de 100644 --- a/src/domain-services/flows/graphql/args.ts +++ b/src/domain-services/flows/graphql/args.ts @@ -19,14 +19,14 @@ export class SearchFlowsFilters { @InputType() export class NestedFlowFilters { - @Field(() => [String], { nullable: true }) - reporterReferenceCodes: string[] | null; + @Field(() => String, { nullable: true }) + reporterRefCode: string | null; - @Field(() => [String], { nullable: true }) - sourceIDs: string[] | null; + @Field(() => String, { nullable: true }) + sourceSystemID: string | null; - @Field(() => [Number], { nullable: true }) - legacyIDs: number[] | null; + @Field(() => Number, { nullable: true }) + legacyID: number | null; } @InputType() diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts index 1adb91d7..154d7187 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts @@ -24,30 +24,25 @@ export class GetFlowIdsFromNestedFlowFiltersStrategyImpl const flowsLegacyId: 
UniqueFlowEntity[] = []; // Get the flowIDs using 'reporterReferenceCode' - if ( - nestedFlowFilters?.reporterReferenceCodes && - nestedFlowFilters?.reporterReferenceCodes.length > 0 - ) { + if (nestedFlowFilters?.reporterRefCode) { flowsReporterReferenceCode = await this.reportDetailService.getUniqueFlowIDsFromReportDetailsByReporterReferenceCode( models, - nestedFlowFilters.reporterReferenceCodes + nestedFlowFilters.reporterRefCode ); } // Get the flowIDs using 'sourceSystemID' - if ( - nestedFlowFilters?.sourceIDs && - nestedFlowFilters?.sourceIDs.length > 0 - ) { + if (nestedFlowFilters?.sourceSystemID) { flowsSourceSystemId = await this.reportDetailService.getUniqueFlowIDsFromReportDetailsBySourceID( models, - nestedFlowFilters.sourceIDs + nestedFlowFilters.sourceSystemID ); } // TODO: Get the flowIDs using 'legacyID' + // TODO: create model for that // if(nestedFlowFilters?.legacyId) { // flowsLegacyId = await this.flowService.getFlowIDsFromSourceSystemID( // models, diff --git a/src/domain-services/report-details/report-detail-service.ts b/src/domain-services/report-details/report-detail-service.ts index 1b097d6e..2e3975e0 100644 --- a/src/domain-services/report-details/report-detail-service.ts +++ b/src/domain-services/report-details/report-detail-service.ts @@ -4,7 +4,6 @@ import { Op } from '@unocha/hpc-api-core/src/db/util/conditions'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; import { Service } from 'typedi'; -import { type Category } from '../categories/graphql/types'; import { type UniqueFlowEntity } from '../flows/model'; import { type ReportDetail } from './graphql/types'; @Service() @@ -70,30 +69,14 @@ export class ReportDetailService { }; } - addChannelToReportDetails( - reportDetails: ReportDetail[], - categories: Category[] - ) { - for (const reportDetail of reportDetails) { - const category = categories.find((cat) => cat.group === 'reportChannel'); - - if (category) { - reportDetail.channel = category.name; - } - } - return reportDetails; - } - async getUniqueFlowIDsFromReportDetailsByReporterReferenceCode( models: Database, - reporterReferenceCodes: string[] + reporterRefCode: string ): Promise { const reportDetails: Array> = await models.reportDetail.find({ where: { - refCode: { - [Op.IN]: reporterReferenceCodes, - }, + refCode: reporterRefCode, }, skipValidation: true, }); @@ -109,14 +92,12 @@ export class ReportDetailService { async getUniqueFlowIDsFromReportDetailsBySourceID( models: Database, - sourceIDs: string[] + sourceID: string ): Promise { const reportDetails: Array> = await models.reportDetail.find({ where: { - sourceID: { - [Op.IN]: sourceIDs, - }, + sourceID: sourceID, }, skipValidation: true, }); From 315124a33dfc5c546388fe158bd01b5d10dbeecc Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 25 Jan 2024 12:25:34 +0100 Subject: [PATCH 62/67] Fetch reportDetails categories --- .../categories/category-service.ts | 84 ++++++++++++++++++- .../flows/flow-search-service.ts | 6 +- 2 files changed, 86 insertions(+), 4 deletions(-) diff --git a/src/domain-services/categories/category-service.ts b/src/domain-services/categories/category-service.ts index 293116f1..a6f14bd9 100644 --- a/src/domain-services/categories/category-service.ts +++ b/src/domain-services/categories/category-service.ts @@ -1,8 +1,13 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; -import { 
Op } from '@unocha/hpc-api-core/src/db/util/conditions'; +import { + Cond, + Op, + type Condition, +} from '@unocha/hpc-api-core/src/db/util/conditions'; import { type InstanceDataOfModel } from '@unocha/hpc-api-core/src/db/util/raw-model'; import { Service } from 'typedi'; +import { type ReportDetail } from '../report-details/graphql/types'; import { type Category } from './graphql/types'; @Service() @@ -117,4 +122,81 @@ export class CategoryService { return categoryRef; } + + async addChannelToReportDetails( + models: Database, + reportDetails: ReportDetail[] + ) { + const listOfCategoryRefORs = []; + + for (const reportDetail of reportDetails) { + const orClause = { + objectID: reportDetail.id, + objectType: 'reportDetail', + }; + + listOfCategoryRefORs.push(orClause); + } + + const categoriesRef: Array> = + await models.categoryRef.find({ + where: { + [Cond.OR]: listOfCategoryRefORs as Array< + Condition> + >, + }, + }); + + const mapOfCategoriesAndReportDetails = new Map(); + + for (const categoryRef of categoriesRef) { + const reportDetail = reportDetails.find( + (reportDetail) => reportDetail.id === categoryRef.objectID.valueOf() + ); + + if (!reportDetail) { + continue; + } + + if ( + !mapOfCategoriesAndReportDetails.has(categoryRef.categoryID.valueOf()) + ) { + mapOfCategoriesAndReportDetails.set( + categoryRef.categoryID.valueOf(), + [] + ); + } + + const reportDetailsPerCategory = mapOfCategoriesAndReportDetails.get( + categoryRef.categoryID.valueOf() + )!; + reportDetailsPerCategory.push(reportDetail); + } + + const categories: Array> = + await models.category.find({ + where: { + id: { + [Op.IN]: categoriesRef.map((catRef) => catRef.categoryID), + }, + }, + }); + + for (const [ + category, + reportDetails, + ] of mapOfCategoriesAndReportDetails.entries()) { + const categoryObj = categories.find((cat) => cat.id === category); + + if (!categoryObj) { + continue; + } + + for (const reportDetail of reportDetails) { + reportDetail.channel = categoryObj.name; + } + } + + return reportDetails; + } } diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 08efe751..4d3933d3 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -225,9 +225,9 @@ export class FlowSearchService { const reportDetails = reportDetailsMap.get(flow.id) ?? 
[]; const reportDetailsWithChannel = - this.reportDetailService.addChannelToReportDetails( - reportDetails, - categories + await this.categoryService.addChannelToReportDetails( + models, + reportDetails ); let parkedParentSource: FlowParkedParentSource | null = null; From 596f40d00a2b192eb145c123c68901ece11f3b4a Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 25 Jan 2024 12:28:00 +0100 Subject: [PATCH 63/67] Add legacyID suport with models approach --- package.json | 2 +- ...-from-nested-flow-filters-strategy-impl.ts | 29 ++++++++++++------- src/domain-services/legacy/legacy-service.ts | 24 +++++++++++++++ 3 files changed, 44 insertions(+), 11 deletions(-) create mode 100644 src/domain-services/legacy/legacy-service.ts diff --git a/package.json b/package.json index d2566809..9916cff7 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "lint": "yarn lint-prettier && yarn lint-eslint" }, "dependencies": { - "@unocha/hpc-api-core": "github:UN-OCHA/hpc-api-core#e298382f38848370c6daa0ac86b2016eddbef356", + "@unocha/hpc-api-core": "github:UN-OCHA/hpc-api-core#242c7c8e88ee130695b987afc06589afd5408710", "apollo-server-hapi": "^3.12.0", "bunyan": "^1.8.15", "class-validator": "^0.14.0", diff --git a/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts index 154d7187..5a633684 100644 --- a/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/get-flowIds-flow-from-nested-flow-filters-strategy-impl.ts @@ -1,4 +1,5 @@ import { Service } from 'typedi'; +import { LegacyService } from '../../../legacy/legacy-service'; import { ReportDetailService } from '../../../report-details/report-detail-service'; import { type UniqueFlowEntity } from '../../model'; import { @@ -12,7 +13,10 @@ import { intersectUniqueFlowEntities } from './utils'; export class GetFlowIdsFromNestedFlowFiltersStrategyImpl implements FlowIDSearchStrategy { - constructor(private readonly reportDetailService: ReportDetailService) {} + constructor( + private readonly reportDetailService: ReportDetailService, + private readonly legacyService: LegacyService + ) {} async search( args: FlowIdSearchStrategyArgs @@ -41,15 +45,20 @@ export class GetFlowIdsFromNestedFlowFiltersStrategyImpl ); } - // TODO: Get the flowIDs using 'legacyID' - // TODO: create model for that - // if(nestedFlowFilters?.legacyId) { - // flowsLegacyId = await this.flowService.getFlowIDsFromSourceSystemID( - // models, - // databaseConnection, - // nestedFlowFilters.sourceSystemId - // ); - // } + // Get the flowIDs using 'legacyID' + if (nestedFlowFilters?.legacyID) { + const flowID = await this.legacyService.getFlowIdFromLegacyId( + models, + nestedFlowFilters.legacyID + ); + + if (flowID) { + flowsLegacyId.push({ + id: flowID, + versionID: 1, + }); + } + } // Intersect the flowIDs from the nestedFlowFilters const flowIDsFromNestedFlowFilters: UniqueFlowEntity[] = diff --git a/src/domain-services/legacy/legacy-service.ts b/src/domain-services/legacy/legacy-service.ts new file mode 100644 index 00000000..060cf7fa --- /dev/null +++ b/src/domain-services/legacy/legacy-service.ts @@ -0,0 +1,24 @@ +import { type Database } from '@unocha/hpc-api-core/src/db'; +import { type FlowId } from '@unocha/hpc-api-core/src/db/models/flow'; +import { createBrandedValue } from '@unocha/hpc-api-core/src/util/types'; +import { Service } from 
'typedi'; + +@Service() +export class LegacyService { + async getFlowIdFromLegacyId( + models: Database, + legacyId: number + ): Promise { + const legacyEntry = await models.legacy.findOne({ + where: { + legacyID: legacyId, + objectType: 'flow', + }, + }); + + if (legacyEntry) { + return createBrandedValue(legacyEntry.objectID); + } + return null; + } +} From dd38861eb23bf498d624a374f66edd9769afc74f Mon Sep 17 00:00:00 2001 From: manelcecs Date: Mon, 29 Jan 2024 10:50:00 +0100 Subject: [PATCH 64/67] Fix nesting property sorting --- src/domain-services/flow-object/model.ts | 36 ++++++---- .../flows/flow-search-service.ts | 7 -- src/domain-services/flows/flow-service.ts | 70 ++++++++++++------- .../search-flow-by-filters-strategy-impl.ts | 8 ++- 4 files changed, 74 insertions(+), 47 deletions(-) diff --git a/src/domain-services/flow-object/model.ts b/src/domain-services/flow-object/model.ts index 7ca369ea..e3668154 100644 --- a/src/domain-services/flow-object/model.ts +++ b/src/domain-services/flow-object/model.ts @@ -1,19 +1,25 @@ import { type Database } from '@unocha/hpc-api-core/src/db'; import { type InstanceOfModel } from '@unocha/hpc-api-core/src/db/util/types'; +import * as t from 'io-ts'; export type FlowObject = InstanceOfModel; -export type FlowObjectType = - | 'governingEntity' - | 'plan' - | 'planEntity' - | 'project' - | 'globalCluster' - | 'organization' - | 'emergency' - | 'flow' - | 'location' - | 'anonymizedOrganization' - | 'cluster' - | 'corePlanEntityActivity' - | 'corePlanEntityObjective' - | 'usageYear'; + +// Define the FlowObjectType as a runtime type +export const FlowObjectType = t.union([ + t.literal('governingEntity'), + t.literal('plan'), + t.literal('planEntity'), + t.literal('project'), + t.literal('globalCluster'), + t.literal('organization'), + t.literal('emergency'), + t.literal('flow'), + t.literal('location'), + t.literal('anonymizedOrganization'), + t.literal('cluster'), + t.literal('corePlanEntityActivity'), + t.literal('corePlanEntityObjective'), + t.literal('usageYear'), +]); + +export type FlowObjectType = t.TypeOf; diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 4d3933d3..e6d1e58e 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -117,13 +117,6 @@ export class FlowSearchService { orderBy ); - // Build cursor condition - // const cursorCondition = this.buildCursorCondition( - // prevPageCursor, - // nextPageCursor, - // orderBy - // ); - const offset = nextPageCursor ?? prevPageCursor ?? 0; const { flows, count } = await strategy.search({ diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 42202a6a..0aeea8be 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -1,7 +1,7 @@ import { type Database } from '@unocha/hpc-api-core/src/db/type'; import type Knex from 'knex'; import { Service } from 'typedi'; -import { type FlowObjectType } from '../flow-object/model'; +import { FlowObjectType } from '../flow-object/model'; import { type FlowOrderBy, type GetFlowsArgs, @@ -71,35 +71,57 @@ export class FlowService { const mappedOrderBy = mapFlowOrderBy(orderBy); const entityList = await dbConnection .queryBuilder() - .select(orderBy.subEntity ? `${orderBy.subEntity}Id` : 'id') + .select( + orderBy.subEntity ? 
`${orderBy.subEntity}Id` : 'id', + 'flowID', + 'versionID' + ) .from(entity) .orderBy(mappedOrderBy.column, mappedOrderBy.order); - const entityIDs = entityList.map((entity) => entity.id); - // Get the flowIDs from the entity list - // using the flow-object relation - const entityCondKey = orderBy.entity as unknown as FlowObjectType; + let mapFlowsToUniqueFlowEntities; - const query = dbConnection - .queryBuilder() - .select('flowID', 'versionID') - .from('flowObject') - .whereIn('objectID', entityIDs) - .andWhere('objectType', entityCondKey) - .andWhere('refDirection', orderBy.direction!) - .orderByRaw(`array_position(ARRAY[${entityIDs.join(',')}], "objectID")`) - .orderBy('flowID', orderBy.order); - - if (limit) { - query.limit(limit); + // Get the flowIDs from the entity list + // using the flow-object relation if the entity is a flow-object + const entityCondKey = orderBy.entity as unknown; + + // Validate the variable using io-ts + const result = FlowObjectType.decode(entityCondKey); + + if (result._tag === 'Right') { + const entityIDs = entityList.map((entity) => entity.id); + const entityCondKeyFlowObjectType = entityCondKey as FlowObjectType; + let query = dbConnection + .queryBuilder() + .select('flowID', 'versionID') + .from('flowObject') + .whereIn('objectID', entityIDs) + .andWhere('objectType', entityCondKeyFlowObjectType); + + if (orderBy.direction) { + query = query.orderBy('refDirection', orderBy.direction); + } + query = query + .orderByRaw(`array_position(ARRAY[${entityIDs.join(',')}], "objectID")`) + .orderBy('flowID', orderBy.order); + + if (limit) { + query.limit(limit); + } + + const flowIDs = await query; + + mapFlowsToUniqueFlowEntities = flowIDs.map((flowID) => ({ + id: flowID.flowID, + versionID: flowID.versionID, + })); + } else { + mapFlowsToUniqueFlowEntities = entityList.map((entity) => ({ + id: entity.flowID, + versionID: entity.versionID, + })); } - const flowIDs = await query; - const mapFlowsToUniqueFlowEntities = flowIDs.map((flowID) => ({ - id: flowID.flowID, - versionID: flowID.versionID, - })); - return removeDuplicatesUniqueFlowEntities(mapFlowsToUniqueFlowEntities); } } diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index 2e7e2641..10b24899 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -58,7 +58,13 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { orderBy, limit ); - sortByFlowIDs.push(...flowIDsFromSortingEntity); + // Since there can be many flowIDs returned + // This can cause 'Maximum call stack size exceeded' error + // When using the spread operator - a workaround is to use push fot each element + // also, we need to map the FlowEntity to UniqueFlowEntity + for (const uniqueFlow of flowIDsFromSortingEntity) { + sortByFlowIDs.push(uniqueFlow); + } } else { // In this case we fetch the list of flows from the database // using the orderBy From 19134fe84cd781e149bf6e9c45b221f6bc7152db Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 30 Jan 2024 15:11:00 +0100 Subject: [PATCH 65/67] Allow no sorting from client --- .../flows/flow-search-service.ts | 38 +++---------------- src/domain-services/flows/flow-service.ts | 23 ++++++----- .../search-flow-by-filters-strategy-impl.ts | 17 +++++++-- .../flows/strategy/impl/utils.ts | 31 ++++++++++++--- 4 files changed, 60 
insertions(+), 49 deletions(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index e6d1e58e..6ad5338a 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -208,8 +208,8 @@ export class FlowSearchService { // Categories Map follows the structure: // flowID: { versionID: [categories]} // So we need to get the categories for the flow version - const categories = - categoriesMap.get(flow.id)!.get(flow.versionID) ?? []; + const categories = categoriesMap.get(flow.id); + const categoriesByVersion = categories?.get(flow.versionID) ?? []; const organizations = organizationsMap.get(flow.id) ?? []; const locations = locationsMap.get(flow.id) ?? []; const plans = plansMap.get(flow.id) ?? []; @@ -253,7 +253,7 @@ export class FlowSearchService { return this.buildFlowDTO( flow, - categories, + categoriesByVersion, organizations, locations, plans, @@ -267,27 +267,6 @@ export class FlowSearchService { }) ); - // const isOrderByForFlows = orderBy.entity === 'flow'; - // const firstItem = items[0]; - // const prevPageCursorEntity = isOrderByForFlows - // ? firstItem - // : firstItem[orderBy.entity as keyof typeof firstItem]; - // const prevPageCursorValue = prevPageCursorEntity - // ? prevPageCursorEntity[ - // orderBy.column as keyof typeof prevPageCursorEntity - // ] ?? '' - // : ''; - - // const lastItem = items.at(-1); - // const nextPageCursorEntity = isOrderByForFlows - // ? lastItem - // : lastItem![orderBy.entity as keyof typeof lastItem]; - // const nextPageCursorValue = nextPageCursorEntity - // ? nextPageCursorEntity[ - // orderBy.column as keyof typeof nextPageCursorEntity - // ]?.toString() ?? '' - // : ''; - return { flows: items, hasNextPage: limit <= flows.length, @@ -343,7 +322,7 @@ export class FlowSearchService { operation: filter.flag ? Op.IN : Op.NOT_IN, })); - return shortcutFilters; + return shortcutFilters.length > 0 ? shortcutFilters : null; } determineStrategy( @@ -360,7 +339,8 @@ export class FlowSearchService { // If there are no sortByEntity (orderBy.entity === 'flow') // but flowFilters only // use onlyFlowFiltersStrategy - const isOrderByEntityFlow = orderBy?.entity === 'flow'; + const isOrderByEntityFlow = + orderBy === undefined || orderBy?.entity === 'flow'; const isFlowFiltersDefined = flowFilters !== undefined; const isFlowObjectFiltersDefined = flowObjectFilters !== undefined; const isFlowCategoryFiltersDefined = flowCategoryFilters !== undefined; @@ -764,12 +744,6 @@ export class FlowSearchService { } // Validate the shortcut filters - // There must be only one shortcut filter - // if only one is defined - // return an object like - // { category: 'Parked', operation: 'IN' } - // if more than one is defined - // throw an error const shortcutFilter = this.mapShortcutFilters( isPendingFlows, isCommitmentFlows, diff --git a/src/domain-services/flows/flow-service.ts b/src/domain-services/flows/flow-service.ts index 0aeea8be..5ff4f4ac 100644 --- a/src/domain-services/flows/flow-service.ts +++ b/src/domain-services/flows/flow-service.ts @@ -69,15 +69,6 @@ export class FlowService { // Get the entity list const mappedOrderBy = mapFlowOrderBy(orderBy); - const entityList = await dbConnection - .queryBuilder() - .select( - orderBy.subEntity ? 
`${orderBy.subEntity}Id` : 'id', - 'flowID', - 'versionID' - ) - .from(entity) - .orderBy(mappedOrderBy.column, mappedOrderBy.order); let mapFlowsToUniqueFlowEntities; @@ -89,6 +80,13 @@ export class FlowService { const result = FlowObjectType.decode(entityCondKey); if (result._tag === 'Right') { + const entityList = await dbConnection + .queryBuilder() + .select('id') + .from(entity) + .orderBy(mappedOrderBy.column, mappedOrderBy.order) + .orderBy('id', mappedOrderBy.order); + const entityIDs = entityList.map((entity) => entity.id); const entityCondKeyFlowObjectType = entityCondKey as FlowObjectType; let query = dbConnection @@ -116,6 +114,13 @@ export class FlowService { versionID: flowID.versionID, })); } else { + const entityList = await dbConnection + .queryBuilder() + .select(`${orderBy.subEntity}Id`, 'flowID', 'versionID') + .from(entity) + .orderBy(mappedOrderBy.column, mappedOrderBy.order) + .orderBy('id', mappedOrderBy.order); + mapFlowsToUniqueFlowEntities = entityList.map((entity) => ({ id: entity.flowID, versionID: entity.versionID, diff --git a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts index 10b24899..d1c6bde8 100644 --- a/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts +++ b/src/domain-services/flows/strategy/impl/search-flow-by-filters-strategy-impl.ts @@ -1,6 +1,7 @@ import { Cond } from '@unocha/hpc-api-core/src/db/util/conditions'; import { Service } from 'typedi'; import { FlowService } from '../../flow-service'; +import { type SearchFlowsFilters } from '../../graphql/args'; import { type FlowEntity, type UniqueFlowEntity } from '../../model'; import { type FlowSearchArgs, @@ -219,9 +220,9 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { ); // Once the list of elements is reduced, we need to build the conditions - const searchConditions = this.buildConditions(reducedFlows); + const searchConditions = this.buildConditions(reducedFlows, flowFilters); - const orderByForFlow = mapFlowOrderBy(orderBy); + const orderByForFlow = { column: 'updatedAt', order: 'DESC' }; const flows = await this.flowService.getFlows({ models, @@ -232,11 +233,21 @@ export class SearchFlowByFiltersStrategy implements FlowSearchStrategy { return { flows, count }; } - buildConditions(uniqueFlowEntities: UniqueFlowEntity[]): any { + buildConditions( + uniqueFlowEntities: UniqueFlowEntity[], + flowFilters: SearchFlowsFilters + ): any { const whereClauses = uniqueFlowEntities.map((flow) => ({ [Cond.AND]: [{ id: flow.id }, { versionID: flow.versionID }], })); + if (flowFilters) { + const flowConditions = prepareFlowConditions(flowFilters); + return { + [Cond.AND]: [flowConditions, { [Cond.OR]: whereClauses }], + }; + } + return { [Cond.OR]: whereClauses, }; diff --git a/src/domain-services/flows/strategy/impl/utils.ts b/src/domain-services/flows/strategy/impl/utils.ts index f0a0378e..bdc8db79 100644 --- a/src/domain-services/flows/strategy/impl/utils.ts +++ b/src/domain-services/flows/strategy/impl/utils.ts @@ -142,16 +142,37 @@ export function mergeFlowIDsFromFilteredFlowObjectsAndFlowCategories( ); } -export function mapFlowOrderBy(orderBy: any) { +export const sortingColumnMapping: Map = new Map< + string, + string +>([ + ['reporterRefCode', 'refCode'], + ['sourceID', 'sourceID'], +]); + +export function mapFlowOrderBy(orderBy: any): { + column: string; + order: string; +} { if (!orderBy) { return { column: 'updatedAt', 
order: 'DESC' }; } - let orderByForFlow = { column: orderBy.column, order: orderBy.order }; - if (orderBy.entity !== 'flow') { - orderByForFlow = { column: 'updatedAt', order: 'DESC' }; + + if (orderBy.entity === 'flow') { + return { column: orderBy.column, order: orderBy.order }; + } + + let columnToSort: string; + if (sortingColumnMapping.has(orderBy.column)) { + // I don't like this but the compiler is complaining + // that columnToSort might be undefined if I don't do this + // but it's already checked that the column exists in the map + columnToSort = sortingColumnMapping.get(orderBy.column) ?? 'updatedAt'; + } else { + columnToSort = orderBy.column; } - return orderByForFlow; + return { column: columnToSort, order: orderBy.order }; } export function prepareFlowConditions(flowFilters: SearchFlowsFilters): any { From 0368b850cb0633578b74d104fc337069920c7a74 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Tue, 30 Jan 2024 16:30:00 +0100 Subject: [PATCH 66/67] Add default filter for searchTotalAmount --- src/domain-services/flows/flow-search-service.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 6ad5338a..9fb870a8 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -743,6 +743,9 @@ export class FlowSearchService { flowFilters.activeStatus = true; } + // This filter MUST apply always + flowFilters.restricted = false; + // Validate the shortcut filters const shortcutFilter = this.mapShortcutFilters( isPendingFlows, From 3f27c7c91d706f3abe308a57e6cd73873f31d844 Mon Sep 17 00:00:00 2001 From: manelcecs Date: Thu, 22 Feb 2024 13:08:23 +0100 Subject: [PATCH 67/67] Fix how prevPageCursor is computed --- src/domain-services/flows/flow-search-service.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/domain-services/flows/flow-search-service.ts b/src/domain-services/flows/flow-search-service.ts index 9fb870a8..dfb007e8 100644 --- a/src/domain-services/flows/flow-search-service.ts +++ b/src/domain-services/flows/flow-search-service.ts @@ -271,7 +271,7 @@ export class FlowSearchService { flows: items, hasNextPage: limit <= flows.length, hasPreviousPage: nextPageCursor !== undefined, - prevPageCursor: nextPageCursor ?? 0, + prevPageCursor: nextPageCursor ? nextPageCursor - limit : 0, nextPageCursor: nextPageCursor ? nextPageCursor + limit : limit, pageSize: flows.length, sortField: `${orderBy.entity}.${orderBy.column}` as FlowSortField,