diff --git a/.eslintrc b/.eslintrc
index f77da8144e9c..cb6d2cb8a0dd 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -312,6 +312,14 @@
"rules": {
"max-len": "off"
}
+ },
+ {
+ "files": ["scripts/**/*"],
+ "rules": {
+ "import/no-extraneous-dependencies": "off",
+ "no-console": "off",
+        "require-jsdoc": "off"
+ }
}
]
-}
+}
\ No newline at end of file
diff --git a/jest.config.js b/jest.config.cjs
similarity index 100%
rename from jest.config.js
rename to jest.config.cjs
diff --git a/next-i18next.config.cjs b/next-i18next.config.cjs
new file mode 100644
index 000000000000..6ee70f95d5fe
--- /dev/null
+++ b/next-i18next.config.cjs
@@ -0,0 +1,13 @@
+// The file is required to be named next-i18next.config.cjs so we can use it in next.config.js.
+// https://github.com/i18next/next-i18next/issues/2185#issuecomment-1618307556
+process.env.I18NEXT_DEFAULT_CONFIG_PATH = './next-i18next.config.cjs';
+
+module.exports = {
+ i18n: {
+ locales: ['en', 'de'],
+ defaultLocale: 'en',
+ namespaces: ['landing-page', 'common', 'tools'],
+ defaultNamespace: 'landing-page',
+    react: { useSuspense: false } // disable Suspense for translation loading in react-i18next
+ }
+};
diff --git a/next-i18next.config.js b/next-i18next.config.js
deleted file mode 100644
index 2848266d6554..000000000000
--- a/next-i18next.config.js
+++ /dev/null
@@ -1,10 +0,0 @@
-module.exports = {
- i18n: {
- locales: ['en', 'de'],
- defaultLocale : 'en',
- namespaces: ['landing-page', 'common', 'tools'],
- defaultNamespace: 'landing-page',
- react: { useSuspense: false },// this line
- },
-
- };
diff --git a/package-lock.json b/package-lock.json
index 82080d33a35c..3d2a04d88f4e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -78,6 +78,7 @@
"swiper": "^11.0.7",
"tailwind-merge": "^2.2.1",
"tailwindcss": "^3.4.3",
+ "tsx": "^4.19.2",
"typescript": "^5.3.3",
"yaml": "^2.3.4"
},
@@ -93,6 +94,8 @@
"@storybook/nextjs": "^8.2.4",
"@storybook/react": "^8.2.4",
"@storybook/test": "^8.2.4",
+ "@types/fs-extra": "^11.0.4",
+ "@types/inquirer": "^9.0.7",
"@types/lodash": "^4.17.0",
"@types/node": "^20",
"@types/react": "^18.0.1",
@@ -2413,6 +2416,30 @@
"yarn": ">=1.22.18"
}
},
+ "node_modules/@cspotcode/source-map-support": {
+ "version": "0.8.1",
+ "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
+ "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
+ "optional": true,
+ "peer": true,
+ "dependencies": {
+ "@jridgewell/trace-mapping": "0.3.9"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": {
+ "version": "0.3.9",
+ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
+ "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
+ "optional": true,
+ "peer": true,
+ "dependencies": {
+ "@jridgewell/resolve-uri": "^3.0.3",
+ "@jridgewell/sourcemap-codec": "^1.4.10"
+ }
+ },
"node_modules/@docsearch/css": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.6.0.tgz",
@@ -2729,6 +2756,21 @@
"node": ">=12"
}
},
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz",
+ "integrity": "sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/@esbuild/openbsd-x64": {
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz",
@@ -7172,6 +7214,34 @@
"node": ">=10.13.0"
}
},
+ "node_modules/@tsconfig/node10": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
+ "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
+ "optional": true,
+ "peer": true
+ },
+ "node_modules/@tsconfig/node12": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
+ "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
+ "optional": true,
+ "peer": true
+ },
+ "node_modules/@tsconfig/node14": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
+ "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
+ "optional": true,
+ "peer": true
+ },
+ "node_modules/@tsconfig/node16": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
+ "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
+ "optional": true,
+ "peer": true
+ },
"node_modules/@types/acorn": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/@types/acorn/-/acorn-4.0.6.tgz",
@@ -7555,6 +7625,16 @@
"@types/send": "*"
}
},
+ "node_modules/@types/fs-extra": {
+ "version": "11.0.4",
+ "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz",
+ "integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==",
+ "dev": true,
+ "dependencies": {
+ "@types/jsonfile": "*",
+ "@types/node": "*"
+ }
+ },
"node_modules/@types/geojson": {
"version": "7946.0.14",
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.14.tgz",
@@ -7597,6 +7677,16 @@
"resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz",
"integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA=="
},
+ "node_modules/@types/inquirer": {
+ "version": "9.0.7",
+ "resolved": "https://registry.npmjs.org/@types/inquirer/-/inquirer-9.0.7.tgz",
+ "integrity": "sha512-Q0zyBupO6NxGRZut/JdmqYKOnN95Eg5V8Csg3PGKkP+FnvsUZx1jAyK7fztIszxxMuoBA6E3KXWvdZVXIpx60g==",
+ "dev": true,
+ "dependencies": {
+ "@types/through": "*",
+ "rxjs": "^7.2.0"
+ }
+ },
"node_modules/@types/is-empty": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@types/is-empty/-/is-empty-1.2.3.tgz",
@@ -7643,6 +7733,15 @@
"integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
"dev": true
},
+ "node_modules/@types/jsonfile": {
+ "version": "6.1.4",
+ "resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz",
+ "integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==",
+ "dev": true,
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
"node_modules/@types/lodash": {
"version": "4.17.6",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.6.tgz",
@@ -7847,6 +7946,15 @@
"integrity": "sha512-hcZhlNvMkQG/k1vcZ6yHOl6WAYftQ2MLfTHcYRZ2xYZFD8tGVnE3qFV0lj1smQeDSR7/yY0PyuUalauf33bJeA==",
"dev": true
},
+ "node_modules/@types/through": {
+ "version": "0.0.33",
+ "resolved": "https://registry.npmjs.org/@types/through/-/through-0.0.33.tgz",
+ "integrity": "sha512-HsJ+z3QuETzP3cswwtzt2vEIiHBk/dCcHGhbmG5X3ecnwFD/lPrMpliGXxSCg03L9AhrdwA4Oz/qfspkDW+xGQ==",
+ "dev": true,
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
"node_modules/@types/unist": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
@@ -10726,6 +10834,13 @@
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
}
},
+ "node_modules/create-require": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
+ "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
+ "optional": true,
+ "peer": true
+ },
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -14593,7 +14708,6 @@
"version": "4.7.5",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.5.tgz",
"integrity": "sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==",
- "dev": true,
"dependencies": {
"resolve-pkg-maps": "^1.0.0"
},
@@ -18246,6 +18360,13 @@
"semver": "bin/semver.js"
}
},
+ "node_modules/make-error": {
+ "version": "1.3.6",
+ "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
+ "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
+ "optional": true,
+ "peer": true
+ },
"node_modules/makeerror": {
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz",
@@ -26186,7 +26307,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
"integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
- "dev": true,
"funding": {
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
@@ -28610,6 +28730,93 @@
"resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz",
"integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="
},
+ "node_modules/ts-node": {
+ "version": "10.9.2",
+ "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
+ "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
+ "optional": true,
+ "peer": true,
+ "dependencies": {
+ "@cspotcode/source-map-support": "^0.8.0",
+ "@tsconfig/node10": "^1.0.7",
+ "@tsconfig/node12": "^1.0.7",
+ "@tsconfig/node14": "^1.0.0",
+ "@tsconfig/node16": "^1.0.2",
+ "acorn": "^8.4.1",
+ "acorn-walk": "^8.1.1",
+ "arg": "^4.1.0",
+ "create-require": "^1.1.0",
+ "diff": "^4.0.1",
+ "make-error": "^1.1.1",
+ "v8-compile-cache-lib": "^3.0.1",
+ "yn": "3.1.1"
+ },
+ "bin": {
+ "ts-node": "dist/bin.js",
+ "ts-node-cwd": "dist/bin-cwd.js",
+ "ts-node-esm": "dist/bin-esm.js",
+ "ts-node-script": "dist/bin-script.js",
+ "ts-node-transpile-only": "dist/bin-transpile.js",
+ "ts-script": "dist/bin-script-deprecated.js"
+ },
+ "peerDependencies": {
+ "@swc/core": ">=1.2.50",
+ "@swc/wasm": ">=1.2.50",
+ "@types/node": "*",
+ "typescript": ">=2.7"
+ },
+ "peerDependenciesMeta": {
+ "@swc/core": {
+ "optional": true
+ },
+ "@swc/wasm": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/ts-node/node_modules/acorn": {
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
+ "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
+ "optional": true,
+ "peer": true,
+ "bin": {
+ "acorn": "bin/acorn"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/ts-node/node_modules/acorn-walk": {
+ "version": "8.3.4",
+ "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
+ "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
+ "optional": true,
+ "peer": true,
+ "dependencies": {
+ "acorn": "^8.11.0"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/ts-node/node_modules/arg": {
+ "version": "4.1.3",
+ "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
+ "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
+ "optional": true,
+ "peer": true
+ },
+ "node_modules/ts-node/node_modules/diff": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+ "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
+ "optional": true,
+ "peer": true,
+ "engines": {
+ "node": ">=0.3.1"
+ }
+ },
"node_modules/ts-pnp": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.2.0.tgz",
@@ -28685,6 +28892,407 @@
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==",
"dev": true
},
+ "node_modules/tsx": {
+ "version": "4.19.2",
+ "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.19.2.tgz",
+ "integrity": "sha512-pOUl6Vo2LUq/bSa8S5q7b91cgNSjctn9ugq/+Mvow99qW6x/UZYwzxy/3NmqoT66eHYfCVvFvACC58UBPFf28g==",
+ "dependencies": {
+ "esbuild": "~0.23.0",
+ "get-tsconfig": "^4.7.5"
+ },
+ "bin": {
+ "tsx": "dist/cli.mjs"
+ },
+ "engines": {
+ "node": ">=18.0.0"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.3"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/aix-ppc64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz",
+ "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==",
+ "cpu": [
+ "ppc64"
+ ],
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/android-arm": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz",
+ "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==",
+ "cpu": [
+ "arm"
+ ],
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/android-arm64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz",
+ "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/android-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz",
+ "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/darwin-arm64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz",
+ "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/darwin-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz",
+ "integrity": "sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz",
+ "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/freebsd-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz",
+ "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-arm": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz",
+ "integrity": "sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==",
+ "cpu": [
+ "arm"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-arm64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz",
+ "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-ia32": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz",
+ "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==",
+ "cpu": [
+ "ia32"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-loong64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz",
+ "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==",
+ "cpu": [
+ "loong64"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-mips64el": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz",
+ "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==",
+ "cpu": [
+ "mips64el"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-ppc64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz",
+ "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==",
+ "cpu": [
+ "ppc64"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-riscv64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz",
+ "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==",
+ "cpu": [
+ "riscv64"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-s390x": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz",
+ "integrity": "sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==",
+ "cpu": [
+ "s390x"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/linux-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz",
+ "integrity": "sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/netbsd-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz",
+ "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/openbsd-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz",
+ "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/sunos-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz",
+ "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/win32-arm64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz",
+ "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/win32-ia32": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz",
+ "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==",
+ "cpu": [
+ "ia32"
+ ],
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/@esbuild/win32-x64": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz",
+ "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tsx/node_modules/esbuild": {
+ "version": "0.23.1",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.23.1.tgz",
+ "integrity": "sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==",
+ "hasInstallScript": true,
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.23.1",
+ "@esbuild/android-arm": "0.23.1",
+ "@esbuild/android-arm64": "0.23.1",
+ "@esbuild/android-x64": "0.23.1",
+ "@esbuild/darwin-arm64": "0.23.1",
+ "@esbuild/darwin-x64": "0.23.1",
+ "@esbuild/freebsd-arm64": "0.23.1",
+ "@esbuild/freebsd-x64": "0.23.1",
+ "@esbuild/linux-arm": "0.23.1",
+ "@esbuild/linux-arm64": "0.23.1",
+ "@esbuild/linux-ia32": "0.23.1",
+ "@esbuild/linux-loong64": "0.23.1",
+ "@esbuild/linux-mips64el": "0.23.1",
+ "@esbuild/linux-ppc64": "0.23.1",
+ "@esbuild/linux-riscv64": "0.23.1",
+ "@esbuild/linux-s390x": "0.23.1",
+ "@esbuild/linux-x64": "0.23.1",
+ "@esbuild/netbsd-x64": "0.23.1",
+ "@esbuild/openbsd-arm64": "0.23.1",
+ "@esbuild/openbsd-x64": "0.23.1",
+ "@esbuild/sunos-x64": "0.23.1",
+ "@esbuild/win32-arm64": "0.23.1",
+ "@esbuild/win32-ia32": "0.23.1",
+ "@esbuild/win32-x64": "0.23.1"
+ }
+ },
"node_modules/tty-browserify": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz",
@@ -29712,6 +30320,13 @@
"node": ">=6"
}
},
+ "node_modules/v8-compile-cache-lib": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
+ "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
+ "optional": true,
+ "peer": true
+ },
"node_modules/v8-to-istanbul": {
"version": "9.3.0",
"resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz",
@@ -30454,6 +31069,16 @@
"node": ">=12"
}
},
+ "node_modules/yn": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
+ "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
+ "optional": true,
+ "peer": true,
+ "engines": {
+ "node": ">=6"
+ }
+ },
"node_modules/yocto-queue": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
diff --git a/package.json b/package.json
index 84f538697d4d..92ece21c0c9f 100644
--- a/package.json
+++ b/package.json
@@ -3,14 +3,15 @@
"version": "0.1.0",
"description": "AsyncAPI website",
"private": true,
+ "type": "module",
"scripts": {
"dev": "npm run build-scripts && next dev",
"build": "npm run build-scripts && next build",
"test": "jest --passWithNoTests",
- "build:pages": "node scripts/build-pages.js && npm run format:mdx",
- "build:posts": "node scripts/index.js",
+ "build:pages": "tsx scripts/build-pages.ts && npm run format:mdx",
+ "build:posts": "tsx scripts/index.ts",
"build-scripts": "npm run build:pages && npm run lint:mdx && npm run build:posts",
- "write:blog": "node ./scripts/compose.js",
+ "write:blog": "tsx ./scripts/compose.ts",
"start": "npx serve@latest out",
"export": "next export",
"lint": "next lint",
@@ -18,12 +19,12 @@
"format:mdx": "prettier --write \"**/*.mdx\"",
"lint:mdx": "remark \"**/*.mdx\"",
"generate:assets": "echo \"No assets to configure\"",
- "generate:meetings": "node scripts/build-meetings.js",
- "generate:dashboard": "node scripts/dashboard/build-dashboard.js",
- "generate:videos": "node scripts/build-newsroom-videos.js",
- "generate:tools": "node scripts/build-tools.js",
+ "generate:meetings": "tsx scripts/build-meetings.ts",
+ "generate:dashboard": "tsx scripts/dashboard/build-dashboard.ts",
+ "generate:videos": "tsx scripts/build-newsroom-videos.ts",
+ "generate:tools": "tsx scripts/build-tools.ts",
"test:netlify": "deno test --allow-env --trace-ops netlify/**/*.test.ts",
- "test:md": "node scripts/markdown/check-markdown.js",
+ "test:md": "tsx scripts/markdown/check-markdown.ts",
"dev:storybook": "storybook dev -p 6006",
"build:storybook": "storybook build"
},
@@ -114,6 +115,7 @@
"swiper": "^11.0.7",
"tailwind-merge": "^2.2.1",
"tailwindcss": "^3.4.3",
+ "tsx": "^4.19.2",
"typescript": "^5.3.3",
"yaml": "^2.3.4"
},
@@ -129,6 +131,8 @@
"@storybook/nextjs": "^8.2.4",
"@storybook/react": "^8.2.4",
"@storybook/test": "^8.2.4",
+ "@types/fs-extra": "^11.0.4",
+ "@types/inquirer": "^9.0.7",
"@types/lodash": "^4.17.0",
"@types/node": "^20",
"@types/react": "^18.0.1",
@@ -151,13 +155,13 @@
"eslint-plugin-storybook": "^0.8.0",
"eslint-plugin-tailwindcss": "^3.14.2",
"eslint-plugin-unused-imports": "^3.1.0",
+ "fast-xml-parser": "^4.5.0",
"inquirer": "^9.2.14",
"jest": "^29.7.0",
"postcss-import": "^16.0.1",
"remark-cli": "^12.0.1",
"remark-lint": "^10.0.0",
"remark-mdx": "^3.0.1",
- "storybook": "^8.2.4",
- "fast-xml-parser": "^4.5.0"
+ "storybook": "^8.2.4"
}
}
diff --git a/pages/_document.tsx b/pages/_document.tsx
index b220a1b44841..fecd327edff1 100644
--- a/pages/_document.tsx
+++ b/pages/_document.tsx
@@ -1,7 +1,7 @@
import Document, { Head, Html, Main, NextScript } from 'next/document';
import React from 'react';
-import i18nextConfig from '../next-i18next.config';
+import i18nextConfig from '../next-i18next.config.cjs';
class MyDocument extends Document {
static async getInitialProps(ctx: any) {
diff --git a/postcss.config.js b/postcss.config.cjs
similarity index 100%
rename from postcss.config.js
rename to postcss.config.cjs
diff --git a/scripts/adopters/index.js b/scripts/adopters/index.js
deleted file mode 100644
index 6a11697ad68f..000000000000
--- a/scripts/adopters/index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const { resolve } = require('path');
-const writeJSON = require('../utils/readAndWriteJson.js')
-
-module.exports = async function buildAdoptersList() {
- writeJSON('config/adopters.yml',resolve(__dirname, '../../config', 'adopters.json'));
-};
diff --git a/scripts/adopters/index.ts b/scripts/adopters/index.ts
new file mode 100644
index 000000000000..c7307049bf59
--- /dev/null
+++ b/scripts/adopters/index.ts
@@ -0,0 +1,11 @@
+import { dirname, resolve } from 'path';
+import { fileURLToPath } from 'url';
+
+import { writeJSON } from '../utils/readAndWriteJson';
+
+const currentFilePath = fileURLToPath(import.meta.url);
+const currentDirPath = dirname(currentFilePath);
+
+export async function buildAdoptersList() {
+ writeJSON('config/adopters.yml', resolve(currentDirPath, '../../config', 'adopters.json'));
+}
diff --git a/scripts/build-docs.js b/scripts/build-docs.ts
similarity index 73%
rename from scripts/build-docs.js
rename to scripts/build-docs.ts
index ac47b6751cee..f63f4914709f 100644
--- a/scripts/build-docs.js
+++ b/scripts/build-docs.ts
@@ -1,23 +1,34 @@
-const sortBy = require('lodash/sortBy')
+import lodash from 'lodash';
+
+const { sortBy } = lodash;
+
function buildNavTree(navItems) {
try {
const tree = {
- 'welcome': {
- item: { title: 'Welcome', weight: 0, isRootSection: true, isSection: true, rootSectionId: 'welcome', sectionWeight: 0, slug: '/docs' },
+ welcome: {
+ item: {
+ title: 'Welcome',
+ weight: 0,
+ isRootSection: true,
+ isSection: true,
+ rootSectionId: 'welcome',
+ sectionWeight: 0,
+ slug: '/docs'
+ },
children: {}
}
- }
+ };
- //first we make sure that list of items lists main section items and then sub sections, documents last
+  // first we make sure the list of items has main section items first, then subsections, and documents last
const sortedItems = sortBy(navItems, ['isRootSection', 'weight', 'isSection']);
- sortedItems.forEach(item => {
- //identify main sections
+ sortedItems.forEach((item) => {
+ // identify main sections
if (item.isRootSection) {
- tree[item.rootSectionId] = { item, children: {} }
+ tree[item.rootSectionId] = { item, children: {} };
}
- //identify subsections
+ // identify subsections
if (item.parent) {
if (!tree[item.parent]) {
throw new Error(`Parent section ${item.parent} not found for item ${item.title}`);
@@ -27,9 +38,13 @@ function buildNavTree(navItems) {
if (!item.isSection) {
if (item.sectionId) {
- let section = tree[item.rootSectionId]?.children[item.sectionId];
+ const section = tree[item.rootSectionId]?.children[item.sectionId];
+
if (!section) {
- tree[item.rootSectionId].children[item.sectionId] = { item, children: [] };
+ tree[item.rootSectionId].children[item.sectionId] = {
+ item,
+ children: []
+ };
}
tree[item.rootSectionId].children[item.sectionId].children.push(item);
} else {
@@ -48,10 +63,11 @@ function buildNavTree(navItems) {
})
.reduce((obj, key) => {
obj[key] = allChildren[key];
+
return obj;
}, {});
- //handling subsections
+ // handling subsections
if (allChildrenKeys.length > 1) {
for (const key of allChildrenKeys) {
if (allChildren[key].children) {
@@ -62,73 +78,79 @@ function buildNavTree(navItems) {
// point in slug for specification subgroup to the latest specification version
if (rootKey === 'reference' && key === 'specification') {
- allChildren[key].item.href = allChildren[key].children.find(c => c.isPrerelease === undefined).slug;
+ allChildren[key].item.href = allChildren[key].children.find((c) => c.isPrerelease === undefined).slug;
}
}
}
}
return tree;
-
} catch (err) {
throw new Error(`Failed to build navigation tree: ${err.message}`);
}
}
-// A recursion function, works on the logic of Depth First Search to traverse all the root and child posts of the
+// A recursive function that uses Depth First Search to traverse all the root and child posts of the
// DocTree to get sequential order of the Doc Posts
const convertDocPosts = (docObject) => {
try {
- let docsArray = []
+ let docsArray = [];
+
// certain entries in the DocPosts are either a parent to many posts or itself a post.
- docsArray.push(docObject?.item || docObject)
+
+ docsArray.push(docObject?.item || docObject);
if (docObject.children) {
- let children = docObject.children
+ const { children } = docObject;
+
Object.keys(children).forEach((child) => {
- let docChildArray = convertDocPosts(children[child])
- docsArray = [...docsArray, ...docChildArray]
- })
+ const docChildArray = convertDocPosts(children[child]);
+
+ docsArray = [...docsArray, ...docChildArray];
+ });
}
- return docsArray
- }
- catch (err) {
+
+ return docsArray;
+ } catch (err) {
throw new Error('Error in convertDocPosts:', err);
}
-}
-
+};
function addDocButtons(docPosts, treePosts) {
let structuredPosts = [];
- let rootSections = [];
+ const rootSections = [];
try {
// Traversing the whole DocTree and storing each post inside them in sequential order
Object.keys(treePosts).forEach((rootElement) => {
structuredPosts.push(treePosts[rootElement].item);
if (treePosts[rootElement].children) {
- let children = treePosts[rootElement].children;
+ const { children } = treePosts[rootElement];
+
Object.keys(children).forEach((child) => {
- let docChildArray = convertDocPosts(children[child]);
+ const docChildArray = convertDocPosts(children[child]);
+
structuredPosts = [...structuredPosts, ...docChildArray];
});
}
});
// Appending the content of welcome page of Docs from the posts.json
- structuredPosts[0] = docPosts.filter(p => p.slug === '/docs')[0];
+ structuredPosts[0] = docPosts.filter((p) => p.slug === '/docs')[0];
// Traversing the structuredPosts in order to add `nextPage` and `prevPage` details for each page
- let countDocPages = structuredPosts.length;
+ const countDocPages = structuredPosts.length;
+
structuredPosts = structuredPosts.map((post, index) => {
- // post item specifying the root Section or sub-section in the docs are excluded as
- // they doesn't comprise any Doc Page or content to be shown in website.
+      // post items specifying a root section or sub-section in the docs are excluded as
+      // they don't comprise any doc page or content to be shown on the website.
if (post?.isRootSection || post?.isSection || index == 0) {
- if (post?.isRootSection || index == 0)
- rootSections.push(post.title)
- return post
+ if (post?.isRootSection || index == 0) rootSections.push(post.title);
+
+ return post;
}
- let nextPage = {}, prevPage = {}
+ let nextPage = {};
+ let prevPage = {};
let docPost = post;
// checks whether the next page for the current docPost item exists or not
@@ -139,14 +161,14 @@ function addDocButtons(docPosts, treePosts) {
nextPage = {
title: structuredPosts[index + 1].title,
href: structuredPosts[index + 1].slug
- }
+ };
} else {
nextPage = {
title: `${structuredPosts[index + 1].title} - ${structuredPosts[index + 2].title}`,
href: structuredPosts[index + 2].slug
- }
+ };
}
- docPost = { ...docPost, nextPage }
+ docPost = { ...docPost, nextPage };
}
// checks whether the previous page for the current docPost item exists or not
@@ -157,8 +179,8 @@ function addDocButtons(docPosts, treePosts) {
prevPage = {
title: structuredPosts[index - 1].title,
href: structuredPosts[index - 1].slug
- }
- docPost = { ...docPost, prevPage }
+ };
+ docPost = { ...docPost, prevPage };
} else {
// additonal check for the first page of Docs so that it doesn't give any Segementation fault
if (index - 2 >= 0) {
@@ -170,13 +192,14 @@ function addDocButtons(docPosts, treePosts) {
}
}
}
+
return docPost;
});
-
} catch (err) {
- throw new Error("An error occurred while adding doc buttons:", err);
+ throw new Error('An error occurred while adding doc buttons:', err);
}
+
return structuredPosts;
}
-module.exports = { buildNavTree, addDocButtons, convertDocPosts }
\ No newline at end of file
+export { addDocButtons, buildNavTree, convertDocPosts };
diff --git a/scripts/build-meetings.js b/scripts/build-meetings.ts
similarity index 54%
rename from scripts/build-meetings.js
rename to scripts/build-meetings.ts
index ee95803d9d44..30feba8a3dd7 100644
--- a/scripts/build-meetings.js
+++ b/scripts/build-meetings.ts
@@ -1,60 +1,66 @@
-const { writeFileSync } = require('fs');
-const { resolve } = require('path');
-const { google } = require('googleapis');
+import assert from 'assert';
+import { writeFileSync } from 'fs';
+import { google } from 'googleapis';
+import { resolve } from 'path';
-async function buildMeetings(writePath) {
+async function buildMeetings(writePath: string) {
let auth;
let calendar;
try {
auth = new google.auth.GoogleAuth({
scopes: ['https://www.googleapis.com/auth/calendar'],
- credentials: process.env.CALENDAR_SERVICE_ACCOUNT ? JSON.parse(process.env.CALENDAR_SERVICE_ACCOUNT) : undefined,
+ credentials: process.env.CALENDAR_SERVICE_ACCOUNT ? JSON.parse(process.env.CALENDAR_SERVICE_ACCOUNT) : undefined
});
calendar = google.calendar({ version: 'v3', auth });
-
} catch (err) {
+ assert(err instanceof Error);
throw new Error(`Authentication failed: ${err.message}`);
}
let eventsItems;
try {
- //cron job runs this always on midnight
+ // cron job runs this always on midnight
const currentTime = new Date(Date.now()).toISOString();
- const timeMin = new Date(
- Date.parse(currentTime) - 100 * 24 * 60 * 60 * 1000
- ).toISOString();
- const timeMax = new Date(
- Date.parse(currentTime) + 30 * 24 * 60 * 60 * 1000
- ).toISOString();
+ const timeMin = new Date(Date.parse(currentTime) - 100 * 24 * 60 * 60 * 1000).toISOString();
+ const timeMax = new Date(Date.parse(currentTime) + 30 * 24 * 60 * 60 * 1000).toISOString();
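+    // fetch events in a window from 100 days in the past to 30 days in the future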
const eventsList = await calendar.events.list({
calendarId: process.env.CALENDAR_ID,
- timeMax: timeMax,
- timeMin: timeMin,
+ timeMax,
+ timeMin
});
+ // check if the response is valid and not undefined
+ if (!eventsList.data.items || !Array.isArray(eventsList.data.items)) {
+ throw new Error('Invalid data structure received from Google Calendar API');
+ }
+
eventsItems = eventsList.data.items.map((e) => {
+ if (!e.start || !e.start.dateTime) {
+ throw new Error('start.dateTime is missing in the event');
+ }
+
return {
title: e.summary,
calLink: e.htmlLink,
url:
e.extendedProperties?.private &&
`https://github.com/asyncapi/community/issues/${e.extendedProperties.private.ISSUE_ID}`,
- banner:
- e.extendedProperties?.private && e.extendedProperties.private.BANNER,
- date: new Date(e.start.dateTime),
+ banner: e.extendedProperties?.private && e.extendedProperties.private.BANNER,
+ date: new Date(e.start.dateTime)
};
});
const eventsForHuman = JSON.stringify(eventsItems, null, ' ');
+
console.log('The following events got fetched', eventsForHuman);
writeFileSync(writePath, eventsForHuman);
-
} catch (err) {
+ assert(err instanceof Error);
throw new Error(`Failed to fetch or process events: ${err.message}`);
}
}
@@ -64,4 +70,4 @@ if (require.main === module) {
buildMeetings(resolve(__dirname, '../config', 'meetings.json'));
}
-module.exports = { buildMeetings };
+export { buildMeetings };
diff --git a/scripts/build-newsroom-videos.js b/scripts/build-newsroom-videos.js
deleted file mode 100644
index 383927765d36..000000000000
--- a/scripts/build-newsroom-videos.js
+++ /dev/null
@@ -1,51 +0,0 @@
-const { writeFileSync } = require('fs-extra');
-const { resolve } = require('path');
-const fetch = require('node-fetch-2');
-
-async function buildNewsroomVideos(writePath) {
- try {
- const response = await fetch('https://youtube.googleapis.com/youtube/v3/search?' + new URLSearchParams({
- key: process.env.YOUTUBE_TOKEN,
- part: 'snippet',
- channelId: 'UCIz9zGwDLbrYQcDKVXdOstQ',
- eventType: 'completed',
- type: 'video',
- order: 'Date',
- maxResults: 5,
- }));
-
- if (!response.ok) {
- throw new Error(`HTTP error! with status code: ${response.status}`);
- }
-
- const data = await response.json();
- console.log(data);
-
- if (!data.items || !Array.isArray(data.items)) {
- throw new Error('Invalid data structure received from YouTube API');
- }
-
- const videoDataItems = data.items.map((video) => ({
- image_url: video.snippet.thumbnails.high.url,
- title: video.snippet.title,
- description: video.snippet.description,
- videoId: video.id.videoId,
- }));
-
- const videoData = JSON.stringify(videoDataItems, null, ' ');
- console.log('The following are the Newsroom Youtube videos: ', videoData);
-
- writeFileSync(writePath, videoData);
-
- return videoData;
- } catch (err) {
- throw new Error(`Failed to build newsroom videos: ${err.message}`);
- }
-}
-
-/* istanbul ignore next */
-if (require.main === module) {
- buildNewsroomVideos(resolve(__dirname, '../config', 'newsroom_videos.json'))
-}
-
-module.exports = { buildNewsroomVideos };
diff --git a/scripts/build-newsroom-videos.ts b/scripts/build-newsroom-videos.ts
new file mode 100644
index 000000000000..100afaa328fa
--- /dev/null
+++ b/scripts/build-newsroom-videos.ts
@@ -0,0 +1,71 @@
+import assert from 'assert';
+import { writeFileSync } from 'fs';
+import type { youtube_v3 } from 'googleapis';
+import { google } from 'googleapis';
+import { dirname, resolve } from 'path';
+import process from 'process';
+import { fileURLToPath } from 'url';
+
+const currentFilePath = fileURLToPath(import.meta.url);
+const currentDirPath = dirname(currentFilePath);
+
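+// Create a YouTube Data API v3 client authenticated with the YOUTUBE_TOKEN API key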
+const youtube = google.youtube({
+ version: 'v3',
+ auth: process.env.YOUTUBE_TOKEN
+});
+
+async function buildNewsroomVideos(writePath: string) {
+ try {
+ const response = await youtube.search.list({
+ part: ['snippet'],
+ channelId: 'UCIz9zGwDLbrYQcDKVXdOstQ',
+ eventType: 'completed',
+ type: ['video'],
+ order: 'date',
+ maxResults: 5
+ } as youtube_v3.Params$Resource$Search$List);
+
+ if (response.status !== 200) {
+ throw new Error(`HTTP error! with status code: ${response.status}`);
+ }
+
+ const data = await response.data;
+
+ console.log(data);
+
+ if (!data.items || !Array.isArray(data.items)) {
+ throw new Error('Invalid data structure received from YouTube API');
+ }
+
+ const videoDataItems = data.items.map((video) => {
+ if (!video.snippet) {
+ throw new Error('Invalid data structure received from YouTube API');
+ }
+
+ return {
+ image_url: video.snippet.thumbnails!.high!.url,
+ title: video.snippet.title,
+ description: video.snippet.description,
+ videoId: video.id!.videoId
+ };
+ });
+
+ const videoData = JSON.stringify(videoDataItems, null, ' ');
+
+ console.log('The following are the Newsroom Youtube videos: ', videoData);
+
+ writeFileSync(writePath, videoData);
+
+ return videoData;
+ } catch (err) {
+ assert(err instanceof Error);
+ throw new Error(`Failed to build newsroom videos: ${err.message}`);
+ }
+}
+
+/* istanbul ignore next */
+if (process.argv[1] === fileURLToPath(import.meta.url)) {
+ buildNewsroomVideos(resolve(currentDirPath, '../config', 'newsroom_videos.json'));
+}
+
+export { buildNewsroomVideos };
diff --git a/scripts/build-pages.js b/scripts/build-pages.ts
similarity index 89%
rename from scripts/build-pages.js
rename to scripts/build-pages.ts
index 48b3553e96b2..901adc8c4947 100644
--- a/scripts/build-pages.js
+++ b/scripts/build-pages.ts
@@ -1,5 +1,5 @@
-const fs = require('fs');
-const path = require('path');
+import fs from 'fs';
+import path from 'path';
const SRC_DIR = 'markdown';
const TARGET_DIR = 'pages';
@@ -11,16 +11,17 @@ if (!fs.existsSync(TARGET_DIR)) {
fs.mkdirSync(TARGET_DIR, { recursive: true });
}
-function capitalizeJsxTags(content) {
+export function capitalizeJsxTags(content: string) {
return content.replace(/<\/?(\w+)/g, function (match, letter) {
if (capitalizeTags.includes(letter.toLowerCase())) {
return `<${match[1] === '/' ? '/' : ''}${letter[0].toUpperCase()}${letter.slice(1)}`;
}
+
return match;
});
}
-function copyAndRenameFiles(srcDir, targetDir) {
+export function copyAndRenameFiles(srcDir: string, targetDir: string) {
// Read all files and directories from source directory
const entries = fs.readdirSync(srcDir, { withFileTypes: true });
@@ -56,5 +57,3 @@ function copyAndRenameFiles(srcDir, targetDir) {
}
copyAndRenameFiles(SRC_DIR, TARGET_DIR);
-
-module.exports = {copyAndRenameFiles,capitalizeJsxTags}
\ No newline at end of file
diff --git a/scripts/build-post-list.js b/scripts/build-post-list.js
deleted file mode 100644
index 288d7dc0c54e..000000000000
--- a/scripts/build-post-list.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const { readdirSync, statSync, existsSync, readFileSync, writeFileSync } = require('fs')
-const { resolve, basename } = require('path')
-const frontMatter = require('gray-matter')
-const toc = require('markdown-toc')
-const { slugify } = require('markdown-toc/lib/utils')
-const readingTime = require('reading-time')
-const { markdownToTxt } = require('markdown-to-txt')
-const { buildNavTree, addDocButtons } = require('./build-docs')
-
-let specWeight = 100
-const result = {
- docs: [],
- blog: [],
- about: [],
- docsTree: {}
-}
-const releaseNotes = []
-const basePath = 'pages'
-const postDirectories = [
- // order of these directories is important, as the blog should come before docs, to create a list of available release notes, which will later be used to release-note-link for spec docs
- [`${basePath}/blog`, '/blog'],
- [`${basePath}/docs`, '/docs'],
- [`${basePath}/about`, '/about']
-];
-
-const addItem = (details) => {
- if(details.slug.startsWith('/docs'))
- result["docs"].push(details)
- else if(details.slug.startsWith('/blog'))
- result["blog"].push(details)
- else if(details.slug.startsWith('/about'))
- result["about"].push(details)
- else {}
-}
-
-module.exports = async function buildPostList() {
- walkDirectories(postDirectories, result)
- const treePosts = buildNavTree(result["docs"].filter((p) => p.slug.startsWith('/docs/')))
- result["docsTree"] = treePosts
- result["docs"] = addDocButtons(result["docs"], treePosts)
- if (process.env.NODE_ENV === 'production') {
- // console.log(inspect(result, { depth: null, colors: true }))
- }
- writeFileSync(resolve(__dirname, '..', 'config', 'posts.json'), JSON.stringify(result, null, ' '))
-}
-
-function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, sectionId, rootSectionId) {
- for (let dir of directories) {
- let directory = dir[0]
- let sectionSlug = dir[1] || ''
- let files = readdirSync(directory);
-
- for (let file of files) {
- let details
- const fileName = [directory, file].join('/')
- const fileNameWithSection = [fileName, '_section.mdx'].join('/')
- const slug = fileName.replace(new RegExp(`^${basePath}`), '')
- const slugElements = slug.split('/');
- if (isDirectory(fileName)) {
- if (existsSync(fileNameWithSection)) {
- // Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057
- details = frontMatter(readFileSync(fileNameWithSection, 'utf-8'), {}).data
- details.title = details.title || capitalize(basename(fileName))
- } else {
- details = {
- title: capitalize(basename(fileName)),
- }
- }
- details.isSection = true
- if (slugElements.length > 3) {
- details.parent = slugElements[slugElements.length - 2]
- details.sectionId = slugElements[slugElements.length - 1]
- }
- if (!details.parent) {
- details.isRootSection = true
- details.rootSectionId = slugElements[slugElements.length - 1]
- }
- details.sectionWeight = sectionWeight
- details.slug = slug
- addItem(details)
- const rootId = details.parent || details.rootSectionId
- walkDirectories([[fileName, slug]], result, details.weight, details.title, details.sectionId, rootId)
- } else if (file.endsWith('.mdx') && !fileName.endsWith('/_section.mdx')) {
- const fileContent = readFileSync(fileName, 'utf-8')
- // Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057
- const { data, content } = frontMatter(fileContent, {})
- details = data
- details.toc = toc(content, { slugify: slugifyToC }).json
- details.readingTime = Math.ceil(readingTime(content).minutes)
- details.excerpt = details.excerpt || markdownToTxt(content).substr(0, 200)
- details.sectionSlug = sectionSlug || slug.replace(/\.mdx$/, '')
- details.sectionWeight = sectionWeight
- details.sectionTitle = sectionTitle
- details.sectionId = sectionId
- details.rootSectionId = rootSectionId
- details.id = fileName
- details.isIndex = fileName.endsWith('/index.mdx')
- details.slug = details.isIndex ? sectionSlug : slug.replace(/\.mdx$/, '')
- if(details.slug.includes('/reference/specification/') && !details.title) {
- const fileBaseName = basename(data.slug) // ex. v2.0.0 | v2.1.0-next-spec.1
- const fileName = fileBaseName.split('-')[0] // v2.0.0 | v2.1.0
- details.weight = specWeight--
-
- if (fileName.startsWith('v')) {
- details.title = capitalize(fileName.slice(1))
- } else {
- details.title = capitalize(fileName)
- }
-
- if(releaseNotes.includes(details.title)){
- details.releaseNoteLink = `/blog/release-notes-${details.title}`
- }
-
- if (fileBaseName.includes('next-spec') || fileBaseName.includes('next-major-spec')) {
- details.isPrerelease = true
- // this need to be separate because the `-` in "Pre-release" will get removed by `capitalize()` function
- details.title += " (Pre-release)"
- }
- if (fileBaseName.includes('explorer')) {
- details.title += " - Explorer"
- }
- }
-
- // To create a list of available ReleaseNotes list, which will be used to add details.releaseNoteLink attribute.
- if(file.startsWith("release-notes") && dir[1] === "/blog"){
- const fileName_without_extension = file.slice(0,-4)
- // removes the file extension. For example, release-notes-2.1.0.md -> release-notes-2.1.0
- const version = fileName_without_extension.slice(fileName_without_extension.lastIndexOf("-")+1)
-
- // gets the version from the name of the releaseNote .md file (from /blog). For example, version = 2.1.0 if fileName_without_extension = release-notes-2.1.0
- releaseNotes.push(version)
- // releaseNotes is the list of all available releaseNotes
- }
-
- addItem(details)
- }
- }
- }
-}
-
-function slugifyToC(str) {
- let slug
- // Try to match heading ids like {# myHeadingId}
- const headingIdMatch = str.match(/[\s]?\{\#([\w\d\-_]+)\}/)
- if (headingIdMatch && headingIdMatch.length >= 2) {
- slug = headingIdMatch[1]
- } else {
-    // Try to match heading ids like {<a name="myHeadingId"/>}
-    const anchorTagMatch = str.match(/[\s]*<a[\s]+name="([\w\d\s\-_]+)"/)
-    if (anchorTagMatch && anchorTagMatch.length >= 2) slug = anchorTagMatch[1]
- }
- return slug || slugify(str, { firsth1: true, maxdepth: 6 })
-}
-
-function isDirectory(dir) {
- return statSync(dir).isDirectory()
-}
-
-function capitalize(text) {
- return text.split(/[\s\-]/g).map(word => `${word[0].toUpperCase()}${word.substr(1)}`).join(' ')
-}
diff --git a/scripts/build-post-list.ts b/scripts/build-post-list.ts
new file mode 100644
index 000000000000..2e4c56a3eec7
--- /dev/null
+++ b/scripts/build-post-list.ts
@@ -0,0 +1,177 @@
+import { existsSync, readdirSync, readFileSync, statSync, writeFileSync } from 'fs';
+import frontMatter from 'gray-matter';
+import { markdownToTxt } from 'markdown-to-txt';
+import toc from 'markdown-toc';
+import markdownTocUtils from 'markdown-toc/lib/utils.js';
+import { basename, dirname, resolve } from 'path';
+import readingTime from 'reading-time';
+import { fileURLToPath } from 'url';
+
+import { addDocButtons, buildNavTree } from './build-docs';
+
+const { slugify } = markdownTocUtils;
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+let specWeight = 100;
+const result = {
+ docs: [],
+ blog: [],
+ about: [],
+ docsTree: {}
+};
+const releaseNotes = [];
+const basePath = 'pages';
+const postDirectories = [
+  // order of these directories is important, as the blog should come before docs, to create a list of available release notes, which is later used to add release-note links to spec docs
+ [`${basePath}/blog`, '/blog'],
+ [`${basePath}/docs`, '/docs'],
+ [`${basePath}/about`, '/about']
+];
+
+const addItem = (details) => {
+ if (details.slug.startsWith('/docs')) result.docs.push(details);
+ else if (details.slug.startsWith('/blog')) result.blog.push(details);
+ else if (details.slug.startsWith('/about')) result.about.push(details);
+ else {
+ }
+};
+
+export async function buildPostList() {
+ walkDirectories(postDirectories, result);
+ const treePosts = buildNavTree(result.docs.filter((p) => p.slug.startsWith('/docs/')));
+
+ result.docsTree = treePosts;
+ result.docs = addDocButtons(result.docs, treePosts);
+ if (process.env.NODE_ENV === 'production') {
+ // console.log(inspect(result, { depth: null, colors: true }))
+ }
+ writeFileSync(resolve(__dirname, '..', 'config', 'posts.json'), JSON.stringify(result, null, ' '));
+}
+
+function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, sectionId, rootSectionId) {
+ for (const dir of directories) {
+ const directory = dir[0];
+ const sectionSlug = dir[1] || '';
+ const files = readdirSync(directory);
+
+ for (const file of files) {
+ let details;
+ const fileName = [directory, file].join('/');
+ const fileNameWithSection = [fileName, '_section.mdx'].join('/');
+ const slug = fileName.replace(new RegExp(`^${basePath}`), '');
+ const slugElements = slug.split('/');
+
+ if (isDirectory(fileName)) {
+ if (existsSync(fileNameWithSection)) {
+ // Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057
+ details = frontMatter(readFileSync(fileNameWithSection, 'utf-8'), {}).data;
+ details.title = details.title || capitalize(basename(fileName));
+ } else {
+ details = {
+ title: capitalize(basename(fileName))
+ };
+ }
+ details.isSection = true;
+ if (slugElements.length > 3) {
+ details.parent = slugElements[slugElements.length - 2];
+ details.sectionId = slugElements[slugElements.length - 1];
+ }
+ if (!details.parent) {
+ details.isRootSection = true;
+ details.rootSectionId = slugElements[slugElements.length - 1];
+ }
+ details.sectionWeight = sectionWeight;
+ details.slug = slug;
+ addItem(details);
+ const rootId = details.parent || details.rootSectionId;
+
+ walkDirectories([[fileName, slug]], result, details.weight, details.title, details.sectionId, rootId);
+ } else if (file.endsWith('.mdx') && !fileName.endsWith('/_section.mdx')) {
+ const fileContent = readFileSync(fileName, 'utf-8');
+ // Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057
+ const { data, content } = frontMatter(fileContent, {});
+
+ details = data;
+ details.toc = toc(content, { slugify: slugifyToC }).json;
+ details.readingTime = Math.ceil(readingTime(content).minutes);
+ details.excerpt = details.excerpt || markdownToTxt(content).substr(0, 200);
+ details.sectionSlug = sectionSlug || slug.replace(/\.mdx$/, '');
+ details.sectionWeight = sectionWeight;
+ details.sectionTitle = sectionTitle;
+ details.sectionId = sectionId;
+ details.rootSectionId = rootSectionId;
+ details.id = fileName;
+ details.isIndex = fileName.endsWith('/index.mdx');
+ details.slug = details.isIndex ? sectionSlug : slug.replace(/\.mdx$/, '');
+ if (details.slug.includes('/reference/specification/') && !details.title) {
+ const fileBaseName = basename(data.slug); // ex. v2.0.0 | v2.1.0-next-spec.1
+ const fileName = fileBaseName.split('-')[0]; // v2.0.0 | v2.1.0
+
+ details.weight = specWeight--;
+
+ if (fileName.startsWith('v')) {
+ details.title = capitalize(fileName.slice(1));
+ } else {
+ details.title = capitalize(fileName);
+ }
+
+ if (releaseNotes.includes(details.title)) {
+ details.releaseNoteLink = `/blog/release-notes-${details.title}`;
+ }
+
+ if (fileBaseName.includes('next-spec') || fileBaseName.includes('next-major-spec')) {
+ details.isPrerelease = true;
+            // this needs to be separate because the `-` in "Pre-release" would get removed by the `capitalize()` function
+ details.title += ' (Pre-release)';
+ }
+ if (fileBaseName.includes('explorer')) {
+ details.title += ' - Explorer';
+ }
+ }
+
+      // Create a list of available release notes, which will be used to add the details.releaseNoteLink attribute.
+ if (file.startsWith('release-notes') && dir[1] === '/blog') {
+ const fileName_without_extension = file.slice(0, -4);
+ // removes the file extension. For example, release-notes-2.1.0.md -> release-notes-2.1.0
+ const version = fileName_without_extension.slice(fileName_without_extension.lastIndexOf('-') + 1);
+
+ // gets the version from the name of the releaseNote .md file (from /blog). For example, version = 2.1.0 if fileName_without_extension = release-notes-2.1.0
+ releaseNotes.push(version);
+ // releaseNotes is the list of all available releaseNotes
+ }
+
+ addItem(details);
+ }
+ }
+ }
+}
+
+function slugifyToC(str) {
+ let slug;
+ // Try to match heading ids like {# myHeadingId}
+ const headingIdMatch = str.match(/[\s]?\{\#([\w\d\-_]+)\}/);
+
+ if (headingIdMatch && headingIdMatch.length >= 2) {
+ slug = headingIdMatch[1];
+ } else {
+    // Try to match heading ids like {<a name="myHeadingId"/>}
+    const anchorTagMatch = str.match(/[\s]*<a[\s]+name="([\w\d\s\-_]+)"/);
+
+    if (anchorTagMatch && anchorTagMatch.length >= 2) slug = anchorTagMatch[1];
+ }
+
+ return slug || slugify(str, { firsth1: true, maxdepth: 6 });
+}
+
+function isDirectory(dir) {
+ return statSync(dir).isDirectory();
+}
+
+function capitalize(text) {
+ return text
+ .split(/[\s\-]/g)
+ .map((word) => `${word[0].toUpperCase()}${word.substr(1)}`)
+ .join(' ');
+}
diff --git a/scripts/build-rss.js b/scripts/build-rss.js
deleted file mode 100644
index 673da1398fe0..000000000000
--- a/scripts/build-rss.js
+++ /dev/null
@@ -1,103 +0,0 @@
-const fs = require('fs').promises
-const json2xml = require('jgexml/json2xml')
-
-function getAllPosts() {
- return require('../config/posts.json');
-}
-
-function clean(s) {
- s = s.split('<span>').join('')
-  s = s.split('&amp;').join('&')
-  s = s.split('&#39;').join("'")
-  s = s.split('&lt;').join('<')
-  s = s.split('&gt;').join('>')
-  s = s.split('&quot;').join('"')
- return s
-}
-
-module.exports = async function rssFeed(type, title, desc, outputPath) {
- try {
-
- let posts = getAllPosts()[`${type}`]
- const missingDatePosts = posts.filter(post => !post.date);
- posts = posts.filter(post => post.date);
- posts.sort((i1, i2) => {
- const i1Date = new Date(i1.date);
- const i2Date = new Date(i2.date);
- if (i1.featured && !i2.featured) return -1;
- if (!i1.featured && i2.featured) return 1;
- return i2Date - i1Date;
- });
-
- if (missingDatePosts.length > 0) {
- throw new Error(`Missing date in posts: ${missingDatePosts.map(p => p.title || p.slug).join(', ')}`);
- }
-
- const base = 'https://www.asyncapi.com'
- const tracking = '?utm_source=rss';
-
- const feed = {}
- const rss = {}
- rss['@version'] = '2.0'
- rss["@xmlns:atom"] = 'http://www.w3.org/2005/Atom'
- rss.channel = {}
- rss.channel.title = title
- rss.channel.link = `${base}/${outputPath}`
- rss.channel["atom:link"] = {}
- rss.channel["atom:link"]["@rel"] = 'self'
- rss.channel["atom:link"]["@href"] = rss.channel.link
- rss.channel["atom:link"]["@type"] = 'application/rss+xml'
- rss.channel.description = desc
- rss.channel.language = 'en-gb';
- rss.channel.copyright = 'Made with :love: by the AsyncAPI Initiative.';
- rss.channel.webMaster = 'info@asyncapi.io (AsyncAPI Initiative)'
- rss.channel.pubDate = new Date().toUTCString()
- rss.channel.generator = 'next.js'
- rss.channel.item = []
-
- const invalidPosts = posts.filter(post =>
- !post.title || !post.slug || !post.excerpt || !post.date
- );
-
- if (invalidPosts.length > 0) {
- throw new Error(`Missing required fields in posts: ${invalidPosts.map(p => p.title || p.slug).join(', ')}`);
- }
-
- for (let post of posts) {
- const link = `${base}${post.slug}${tracking}`;
- const { title, excerpt, date } = post;
- const pubDate = new Date(date).toUTCString();
- const description = clean(excerpt);
- const guid = { '@isPermaLink': true, '': link };
- const item = {
- title,
- description,
- link,
- category: type,
- guid,
- pubDate
- };
- if (post.cover) {
- const enclosure = {};
- enclosure["@url"] = base + post.cover;
- enclosure["@length"] = 15026; // dummy value, anything works
- enclosure["@type"] = 'image/jpeg';
- if (typeof enclosure["@url"] === 'string') {
- let tmp = enclosure["@url"].toLowerCase();
- if (tmp.indexOf('.png') >= 0) enclosure["@type"] = 'image/png';
- if (tmp.indexOf('.svg') >= 0) enclosure["@type"] = 'image/svg+xml';
- if (tmp.indexOf('.webp') >= 0) enclosure["@type"] = 'image/webp';
- }
- item.enclosure = enclosure;
- }
- rss.channel.item.push(item)
- }
-
- feed.rss = rss
-
- const xml = json2xml.getXml(feed, '@', '', 2);
- await fs.writeFile(`./public/${outputPath}`, xml, 'utf8');
- } catch (err) {
- throw new Error(`Failed to generate RSS feed: ${err.message}`);
- }
-};
diff --git a/scripts/build-rss.ts b/scripts/build-rss.ts
new file mode 100644
index 000000000000..28298b7bb7e6
--- /dev/null
+++ b/scripts/build-rss.ts
@@ -0,0 +1,120 @@
+import assert from 'assert';
+import fs from 'fs/promises';
+import json2xml from 'jgexml/json2xml';
+
+import type { BlogPostTypes, Enclosure, RSS, RSSItemType } from '@/types/scripts/build-rss';
+
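+// Loads all posts from the generated config/posts.json.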
+async function getAllPosts() {
+ const posts = (await import('../config/posts.json', { assert: { type: 'json' } })).default;
+
+ return posts;
+}
+
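+// Decodes common HTML entities and strips <span> tags so excerpts render cleanly in the RSS feed.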
+function clean(s: string) {
+ let cleanS = s;
+
+ cleanS = cleanS.split('<span>').join('');
+ cleanS = cleanS.split('&amp;').join('&');
+ cleanS = cleanS.split('&#39;').join("'");
+ cleanS = cleanS.split('&lt;').join('<');
+ cleanS = cleanS.split('&gt;').join('>');
+ cleanS = cleanS.split('&quot;').join('"');
+
+ return cleanS;
+}
+
+export async function rssFeed(type: BlogPostTypes, rssTitle: string, desc: string, outputPath: string) {
+ try {
+ let posts = (await getAllPosts())[`${type}`] as any[];
+ const missingDatePosts = posts.filter((post) => !post.date);
+
+ posts = posts.filter((post) => post.date);
+ posts.sort((i1, i2) => {
+ const i1Date = new Date(i1.date);
+ const i2Date = new Date(i2.date);
+
+ if (i1.featured && !i2.featured) return -1;
+ if (!i1.featured && i2.featured) return 1;
+
+ return i2Date.getTime() - i1Date.getTime();
+ });
+
+ if (missingDatePosts.length > 0) {
+ throw new Error(`Missing date in posts: ${missingDatePosts.map((p) => p.title || p.slug).join(', ')}`);
+ }
+
+ const base = 'https://www.asyncapi.com';
+ const tracking = '?utm_source=rss';
+
+ const feed = {} as { rss: RSS };
+ const rss = {} as RSS;
+
+ rss['@version'] = '2.0';
+ rss['@xmlns:atom'] = 'http://www.w3.org/2005/Atom';
+ rss.channel = {} as RSS['channel'];
+ rss.channel.title = rssTitle;
+ rss.channel.link = `${base}/${outputPath}`;
+ rss.channel['atom:link'] = {} as RSS['channel']['atom:link'];
+ rss.channel['atom:link']['@rel'] = 'self';
+ rss.channel['atom:link']['@href'] = rss.channel.link;
+ rss.channel['atom:link']['@type'] = 'application/rss+xml';
+ rss.channel.description = desc;
+ rss.channel.language = 'en-gb';
+ rss.channel.copyright = 'Made with :love: by the AsyncAPI Initiative.';
+ rss.channel.webMaster = 'info@asyncapi.io (AsyncAPI Initiative)';
+ rss.channel.pubDate = new Date().toUTCString();
+ rss.channel.generator = 'next.js';
+ rss.channel.item = [];
+
+ const invalidPosts = posts.filter((post) => !post.title || !post.slug || !post.excerpt || !post.date);
+
+ if (invalidPosts.length > 0) {
+ throw new Error(`Missing required fields in posts: ${invalidPosts.map((p) => p.title || p.slug).join(', ')}`);
+ }
+
+ for (const post of posts) {
+ const link = `${base}${post.slug}${tracking}`;
+ const { title, excerpt, date } = post;
+ const pubDate = new Date(date).toUTCString();
+ const description = clean(excerpt);
+ const guid = { '@isPermaLink': true, '': link };
+ const item: RSSItemType = {
+ title,
+ description,
+ link,
+ category: type,
+ guid,
+ pubDate
+ } as RSSItemType;
+
+ if (post.cover) {
+ const enclosure = {} as Enclosure;
+
+ enclosure['@url'] = base + post.cover;
+ enclosure['@length'] = 15026; // dummy value, anything works
+ enclosure['@type'] = 'image/jpeg';
+ if (typeof enclosure['@url'] === 'string') {
+ const tmp = enclosure['@url'].toLowerCase();
+
+ // eslint-disable-next-line max-depth
+ if (tmp.indexOf('.png') >= 0) enclosure['@type'] = 'image/png';
+ // eslint-disable-next-line max-depth
+ if (tmp.indexOf('.svg') >= 0) enclosure['@type'] = 'image/svg+xml';
+ // eslint-disable-next-line max-depth
+ if (tmp.indexOf('.webp') >= 0) enclosure['@type'] = 'image/webp';
+ }
+ item.enclosure = enclosure;
+ }
+ rss.channel.item.push(item);
+ }
+
+ feed.rss = rss;
+
+ const xml = json2xml.getXml(feed, '@', '', 2);
+
+ await fs.writeFile(`./public/${outputPath}`, xml, 'utf8');
+ } catch (err) {
+ assert(err instanceof Error);
+ throw new Error(`Failed to generate RSS feed: ${err.message}`);
+ }
+}
diff --git a/scripts/build-tools.js b/scripts/build-tools.js
deleted file mode 100644
index c5cce74a7cb1..000000000000
--- a/scripts/build-tools.js
+++ /dev/null
@@ -1,30 +0,0 @@
-const { getData } = require('./tools/extract-tools-github');
-const { convertTools } = require('./tools/tools-object');
-const { combineTools } = require('./tools/combine-tools');
-const fs = require('fs-extra');
-const { resolve } = require('path');
-
-const buildTools = async (automatedToolsPath, manualToolsPath, toolsPath, tagsPath) => {
- try {
- let githubExtractData = await getData();
- let automatedTools = await convertTools(githubExtractData);
-
- await fs.writeFile(automatedToolsPath, JSON.stringify(automatedTools, null, ' '));
-
- await combineTools(automatedTools, require(manualToolsPath), toolsPath, tagsPath);
- } catch (err) {
- throw new Error(`An error occurred while building tools: ${err.message}`);
- }
-};
-
-/* istanbul ignore next */
-if (require.main === module) {
- const automatedToolsPath = resolve(__dirname, '../config', 'tools-automated.json');
- const manualToolsPath = resolve(__dirname, '../config', 'tools-manual.json');
- const toolsPath = resolve(__dirname, '../config', 'tools.json');
- const tagsPath = resolve(__dirname, '../config', 'all-tags.json');
-
- buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath);
-}
-
-module.exports = { buildTools };
diff --git a/scripts/build-tools.ts b/scripts/build-tools.ts
new file mode 100644
index 000000000000..72a0fa2b4af0
--- /dev/null
+++ b/scripts/build-tools.ts
@@ -0,0 +1,38 @@
+import assert from 'assert';
+import fs from 'fs-extra';
+import { dirname, resolve } from 'path';
+import { fileURLToPath } from 'url';
+
+import { combineTools } from './tools/combine-tools';
+import { getData } from './tools/extract-tools-github';
+import { convertTools } from './tools/tools-object';
+
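+// ESM equivalents of __filename and __dirname.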
+const currentFilePath = fileURLToPath(import.meta.url);
+const currentDirPath = dirname(currentFilePath);
+
+const buildTools = async (automatedToolsPath: string, manualToolsPath: string, toolsPath: string, tagsPath: string) => {
+ try {
+ const githubExtractData = await getData();
+ const automatedTools = await convertTools(githubExtractData);
+
+ await fs.writeFile(automatedToolsPath, JSON.stringify(automatedTools, null, ' '));
+
+ // eslint-disable-next-line import/no-dynamic-require, global-require
+ await combineTools(automatedTools, require(manualToolsPath), toolsPath, tagsPath);
+ } catch (err) {
+ assert(err instanceof Error);
+ throw new Error(`An error occurred while building tools: ${err.message}`);
+ }
+};
+
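+// Run the build only when this file is executed directly (ESM equivalent of require.main === module).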
+/* istanbul ignore next */
+if (process.argv[1] === fileURLToPath(import.meta.url)) {
+ const automatedToolsPath = resolve(currentDirPath, '../config', 'tools-automated.json');
+ const manualToolsPath = resolve(currentDirPath, '../config', 'tools-manual.json');
+ const toolsPath = resolve(currentDirPath, '../config', 'tools.json');
+ const tagsPath = resolve(currentDirPath, '../config', 'all-tags.json');
+
+ buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath);
+}
+
+export { buildTools };
diff --git a/scripts/casestudies/index.js b/scripts/casestudies/index.js
deleted file mode 100644
index 77695e06fd38..000000000000
--- a/scripts/casestudies/index.js
+++ /dev/null
@@ -1,19 +0,0 @@
-const { readdir, writeFile, readFile } = require('fs').promises;
-const { convertToJson } = require('../../scripts/utils');
-
-module.exports = async function buildCaseStudiesList(dirWithCaseStudy, writeFilePath) {
- try {
- let files = await readdir(dirWithCaseStudy);
- let caseStudiesList = [];
- for (let file of files) {
- const caseStudyFileName = [dirWithCaseStudy, file].join('/');
- const caseStudyContent = await readFile(caseStudyFileName, 'utf-8');
- const jsonContent = convertToJson(caseStudyContent);
-
- caseStudiesList.push(jsonContent);
- await writeFile(writeFilePath, JSON.stringify(caseStudiesList))
- }
- } catch (err) {
- throw new Error(err);
- }
-};
diff --git a/scripts/casestudies/index.ts b/scripts/casestudies/index.ts
new file mode 100644
index 000000000000..1db47f373e0c
--- /dev/null
+++ b/scripts/casestudies/index.ts
@@ -0,0 +1,26 @@
+import { readdir, readFile, writeFile } from 'fs/promises';
+
+import { convertToJson } from '../utils';
+
+export async function buildCaseStudiesList(dirWithCaseStudy: string, writeFilePath: string) {
+ try {
+ const files = await readdir(dirWithCaseStudy);
+
+ // Process all files in parallel using Promise.all
+ const caseStudiesList = await Promise.all(
+ files.map(async (file) => {
+ const caseStudyFileName = [dirWithCaseStudy, file].join('/');
+ const caseStudyContent = await readFile(caseStudyFileName, 'utf-8');
+
+ return convertToJson(caseStudyContent);
+ })
+ );
+
+ // Write the complete list once after all files are processed
+ await writeFile(writeFilePath, JSON.stringify(caseStudiesList));
+
+ return caseStudiesList;
+ } catch (err) {
+ throw new Error(err instanceof Error ? err.message : String(err));
+ }
+}
diff --git a/scripts/compose.js b/scripts/compose.ts
similarity index 79%
rename from scripts/compose.js
rename to scripts/compose.ts
index 8c4f0e3a4a36..3339c51d261b 100644
--- a/scripts/compose.js
+++ b/scripts/compose.ts
@@ -2,25 +2,30 @@
* Script based on https://github.com/timlrx/tailwind-nextjs-starter-blog/blob/master/scripts/compose.js
*/
-const fs = require('fs')
-const inquirer = require('inquirer')
-const dedent = require('dedent')
-const moment = require('moment')
-
-const genFrontMatter = (answers) => {
- let d = new Date()
- const date = [
- d.getFullYear(),
- ('0' + (d.getMonth() + 1)).slice(-2),
- ('0' + d.getDate()).slice(-2),
- ].join('-')
- const tagArray = answers.tags.split(',')
- tagArray.forEach((tag, index) => (tagArray[index] = tag.trim()))
- const tags = "'" + tagArray.join("','") + "'"
+import dedent from 'dedent';
+import fs from 'fs';
+import inquirer from 'inquirer';
+import moment from 'moment';
+
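+// Shape of the answers collected by the inquirer prompt below.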
+type ComposePromptType = {
+ title: string;
+ excerpt: string;
+ tags: string;
+ type: string;
+ canonical: string;
+};
+
+const genFrontMatter = (answers: ComposePromptType) => {
+ const tagArray = answers.tags.split(',');
+
+ tagArray.forEach((tag: string, index: number) => {
+ tagArray[index] = tag.trim();
+ });
+ const tags = `'${tagArray.join("','")}'`;
let frontMatter = dedent`---
title: ${answers.title ? answers.title : 'Untitled'}
- date: ${moment().format("YYYY-MM-DDTh:mm:ssZ")}
+ date: ${moment().format('YYYY-MM-DDTh:mm:ssZ')}
type: ${answers.type}
canonical: ${answers.canonical ? answers.canonical : ''}
tags: [${answers.tags ? tags : ''}]
@@ -90,64 +95,65 @@ const genFrontMatter = (answers) => {
- `
+ `;
- frontMatter = frontMatter + '\n---'
+ frontMatter += '\n---';
- return frontMatter
-}
+ return frontMatter;
+};
inquirer
.prompt([
{
name: 'title',
message: 'Enter post title:',
- type: 'input',
+ type: 'input'
},
{
name: 'excerpt',
message: 'Enter post excerpt:',
- type: 'input',
+ type: 'input'
},
{
name: 'tags',
message: 'Any Tags? Separate them with , or leave empty if no tags.',
- type: 'input',
+ type: 'input'
},
{
name: 'type',
message: 'Enter the post type:',
type: 'list',
- choices: ['Communication', 'Community', 'Engineering', 'Marketing', 'Strategy', 'Video'],
+ choices: ['Communication', 'Community', 'Engineering', 'Marketing', 'Strategy', 'Video']
},
{
name: 'canonical',
message: 'Enter the canonical URL if any:',
- type: 'input',
- },
+ type: 'input'
+ }
])
- .then((answers) => {
+ .then((answers: ComposePromptType) => {
// Remove special characters and replace space with -
const fileName = answers.title
.toLowerCase()
.replace(/[^a-zA-Z0-9 ]/g, '')
.replace(/ /g, '-')
- .replace(/-+/g, '-')
- const frontMatter = genFrontMatter(answers)
- const filePath = `pages/blog/${fileName ? fileName : 'untitled'}.md`
+ .replace(/-+/g, '-');
+ const frontMatter = genFrontMatter(answers);
+ const filePath = `pages/blog/${fileName || 'untitled'}.md`;
+
fs.writeFile(filePath, frontMatter, { flag: 'wx' }, (err) => {
if (err) {
- throw err
+ throw err;
} else {
- console.log(`Blog post generated successfully at ${filePath}`)
+ console.log(`Blog post generated successfully at ${filePath}`);
}
- })
+ });
})
.catch((error) => {
- console.error(error)
+ console.error(error);
if (error.isTtyError) {
- console.log("Prompt couldn't be rendered in the current environment")
+ console.log("Prompt couldn't be rendered in the current environment");
} else {
- console.log('Something went wrong, sorry!')
+ console.log('Something went wrong, sorry!');
}
- })
+ });
diff --git a/scripts/dashboard/build-dashboard.js b/scripts/dashboard/build-dashboard.ts
similarity index 91%
rename from scripts/dashboard/build-dashboard.js
rename to scripts/dashboard/build-dashboard.ts
index c20be204e87b..b616111484f2 100644
--- a/scripts/dashboard/build-dashboard.js
+++ b/scripts/dashboard/build-dashboard.ts
@@ -1,7 +1,8 @@
-const { writeFile } = require('fs-extra');
-const { resolve } = require('path');
-const { graphql } = require('@octokit/graphql');
-const { Queries } = require('./issue-queries');
+import { graphql } from '@octokit/graphql';
+import { writeFile } from 'fs-extra';
+import { resolve } from 'path';
+
+import { Queries } from './issue-queries';
/**
* Introduces a delay in the execution flow.
@@ -26,7 +27,7 @@ async function getDiscussions(query, pageSize, endCursor = null) {
if (result.rateLimit.remaining <= 100) {
console.log(
- `[WARNING] GitHub GraphQL rateLimit`,
+ '[WARNING] GitHub GraphQL rateLimit',
`cost = ${result.rateLimit.cost}`,
`limit = ${result.rateLimit.limit}`,
`remaining = ${result.rateLimit.remaining}`,
@@ -41,9 +42,11 @@ async function getDiscussions(query, pageSize, endCursor = null) {
if (!hasNextPage) {
return result.search.nodes;
}
+
return result.search.nodes.concat(await getDiscussions(query, pageSize, result.search.pageInfo.endCursor));
} catch (e) {
console.error(e);
+
return Promise.reject(e);
}
}
@@ -60,6 +63,7 @@ async function getDiscussionByID(isPR, id) {
return result;
} catch (e) {
console.error(e);
+
return Promise.reject(e);
}
}
@@ -69,8 +73,10 @@ async function processHotDiscussions(batch) {
batch.map(async (discussion) => {
try {
const isPR = discussion.__typename === 'PullRequest';
+
if (discussion.comments.pageInfo.hasNextPage) {
const fetchedDiscussion = await getDiscussionByID(isPR, discussion.id);
+
discussion = fetchedDiscussion.node;
}
@@ -81,8 +87,8 @@ async function processHotDiscussions(batch) {
const finalInteractionsCount = isPR
? interactionsCount +
- discussion.reviews.totalCount +
- discussion.reviews.nodes.reduce((acc, curr) => acc + curr.comments.totalCount, 0)
+ discussion.reviews.totalCount +
+ discussion.reviews.nodes.reduce((acc, curr) => acc + curr.comments.totalCount, 0)
: interactionsCount;
return {
@@ -111,12 +117,14 @@ async function getHotDiscussions(discussions) {
for (let i = 0; i < discussions.length; i += batchSize) {
const batch = discussions.slice(i, i + batchSize);
const batchResults = await processHotDiscussions(batch);
+
await pause(1000);
result.push(...batchResults);
}
result.sort((ElemA, ElemB) => ElemB.score - ElemA.score);
const filteredResult = result.filter((issue) => issue.author !== 'asyncapi-bot');
+
return filteredResult.slice(0, 12);
}
@@ -149,6 +157,7 @@ async function mapGoodFirstIssues(issues) {
function getLabel(issue, filter) {
const result = issue.labels.nodes.find((label) => label.name.startsWith(filter));
+
return result?.name.split('/')[1];
}
@@ -156,6 +165,7 @@ function monthsSince(date) {
const seconds = Math.floor((new Date() - new Date(date)) / 1000);
// 2592000 = number of seconds in a month = 30 * 24 * 60 * 60
const months = seconds / 2592000;
+
return Math.floor(months);
}
@@ -169,6 +179,7 @@ async function start(writePath) {
getHotDiscussions(discussions),
mapGoodFirstIssues(rawGoodFirstIssues)
]);
+
return await writeToFile({ hotDiscussions, goodFirstIssues }, writePath);
} catch (e) {
console.log('There were some issues parsing data from github.');
@@ -181,4 +192,14 @@ if (require.main === module) {
start(resolve(__dirname, '..', '..', 'dashboard.json'));
}
-module.exports = { getLabel, monthsSince, mapGoodFirstIssues, getHotDiscussions, getDiscussionByID, getDiscussions, writeToFile, start, processHotDiscussions };
+export {
+ getDiscussionByID,
+ getDiscussions,
+ getHotDiscussions,
+ getLabel,
+ mapGoodFirstIssues,
+ monthsSince,
+ processHotDiscussions,
+ start,
+ writeToFile
+};
diff --git a/scripts/dashboard/issue-queries.js b/scripts/dashboard/issue-queries.ts
similarity index 99%
rename from scripts/dashboard/issue-queries.js
rename to scripts/dashboard/issue-queries.ts
index 629214a9ea90..7eafae9fbd3a 100644
--- a/scripts/dashboard/issue-queries.js
+++ b/scripts/dashboard/issue-queries.ts
@@ -1,4 +1,4 @@
-exports.Queries = Object.freeze({
+export const Queries = Object.freeze({
pullRequestById: `
query IssueByID($id: ID!) {
node(id: $id) {
@@ -274,5 +274,5 @@ query($first: Int!, $after: String) {
resetAt
}
}
-`,
+`
});
diff --git a/scripts/finance/index.js b/scripts/finance/index.js
deleted file mode 100644
index 3f4a5edcfb6e..000000000000
--- a/scripts/finance/index.js
+++ /dev/null
@@ -1,25 +0,0 @@
-const {
- promises: { mkdir }
-} = require('fs');
-const { resolve } = require('path');
-const writeJSON = require('../utils/readAndWriteJson.js');
-
-module.exports = async function buildFinanceInfoList({ currentDir, configDir, financeDir, year, jsonDataDir }) {
- try {
- const expensesPath = resolve(currentDir, configDir, financeDir, year, 'Expenses.yml');
- const expensesLinkPath = resolve(currentDir, configDir, financeDir, year, 'ExpensesLink.yml');
-
- // Ensure the directory exists before writing the files
- const jsonDirectory = resolve(currentDir, configDir, financeDir, jsonDataDir);
- await mkdir(jsonDirectory, { recursive: true });
-
- // Write Expenses and ExpensesLink to JSON files
- const expensesJsonPath = resolve(jsonDirectory, 'Expenses.json');
- await writeJSON(expensesPath, expensesJsonPath);
-
- const expensesLinkJsonPath = resolve(jsonDirectory, 'ExpensesLink.json');
- await writeJSON(expensesLinkPath, expensesLinkJsonPath);
- } catch (err) {
- throw new Error(err);
- }
-};
\ No newline at end of file
diff --git a/scripts/finance/index.ts b/scripts/finance/index.ts
new file mode 100644
index 000000000000..2fddbf7f41ab
--- /dev/null
+++ b/scripts/finance/index.ts
@@ -0,0 +1,43 @@
+import assert from 'assert';
+import { mkdir } from 'fs/promises';
+import { resolve } from 'path';
+
+import { writeJSON } from '../utils/readAndWriteJson';
+
+interface BuildFinanceInfoListProps {
+ currentDir: string;
+ configDir: string;
+ financeDir: string;
+ year: string;
+ jsonDataDir: string;
+}
+
+export async function buildFinanceInfoList({
+ currentDir,
+ configDir,
+ financeDir,
+ year,
+ jsonDataDir
+}: BuildFinanceInfoListProps) {
+ try {
+ const expensesPath = resolve(currentDir, configDir, financeDir, year, 'Expenses.yml');
+ const expensesLinkPath = resolve(currentDir, configDir, financeDir, year, 'ExpensesLink.yml');
+
+ // Ensure the directory exists before writing the files
+ const jsonDirectory = resolve(currentDir, configDir, financeDir, jsonDataDir);
+
+ await mkdir(jsonDirectory, { recursive: true });
+
+ // Write Expenses and ExpensesLink to JSON files
+ const expensesJsonPath = resolve(jsonDirectory, 'Expenses.json');
+
+ await writeJSON(expensesPath, expensesJsonPath);
+
+ const expensesLinkJsonPath = resolve(jsonDirectory, 'ExpensesLink.json');
+
+ await writeJSON(expensesLinkPath, expensesLinkJsonPath);
+ } catch (err) {
+ assert(err instanceof Error);
+ throw new Error(err.message);
+ }
+}
diff --git a/scripts/index.js b/scripts/index.ts
similarity index 55%
rename from scripts/index.js
rename to scripts/index.ts
index 33125fe7533b..96fcafcb384e 100644
--- a/scripts/index.js
+++ b/scripts/index.ts
@@ -1,23 +1,20 @@
-const { resolve } = require('path');
-const fs = require('fs');
-const rssFeed = require('./build-rss');
-const buildPostList = require('./build-post-list');
-const buildCaseStudiesList = require('./casestudies');
-const buildAdoptersList = require('./adopters');
-const buildFinanceInfoList = require('./finance');
+import fs from 'fs';
+import { dirname, resolve } from 'path';
+import { fileURLToPath } from 'url';
+
+import { buildAdoptersList } from './adopters/index';
+import { buildPostList } from './build-post-list';
+import { rssFeed } from './build-rss';
+import { buildCaseStudiesList } from './casestudies/index';
+import { buildFinanceInfoList } from './finance/index';
+
+const currentFilePath = fileURLToPath(import.meta.url);
+const currentDirPath = dirname(currentFilePath);
async function start() {
await buildPostList();
- rssFeed(
- 'blog',
- 'AsyncAPI Initiative Blog RSS Feed',
- 'AsyncAPI Initiative Blog',
- 'rss.xml'
- );
- await buildCaseStudiesList(
- 'config/casestudies',
- resolve(__dirname, '../config', 'case-studies.json')
- );
+ rssFeed('blog', 'AsyncAPI Initiative Blog RSS Feed', 'AsyncAPI Initiative Blog', 'rss.xml');
+ await buildCaseStudiesList('config/casestudies', resolve(currentDirPath, '../config', 'case-studies.json'));
await buildAdoptersList();
const financeDir = resolve('.', 'config', 'finance');
@@ -48,6 +45,6 @@ async function start() {
});
}
-module.exports = start;
+export { start };
start();
diff --git a/scripts/markdown/check-markdown.js b/scripts/markdown/check-markdown.js
deleted file mode 100644
index 8979f7e0b4ab..000000000000
--- a/scripts/markdown/check-markdown.js
+++ /dev/null
@@ -1,146 +0,0 @@
-const fs = require('fs');
-const matter = require('gray-matter');
-const path = require('path');
-
-/**
- * Checks if a given string is a valid URL.
- * @param {string} str - The string to validate as a URL.
- * @returns {boolean} True if the string is a valid URL, false otherwise.
- */
-function isValidURL(str) {
- try {
- new URL(str);
- return true;
- } catch (err) {
- return false;
- }
-}
-
-/**
- * Validates the frontmatter of a blog post.
- * @param {object} frontmatter - The frontmatter object to validate.
- * @param {string} filePath - The path to the file being validated.
- * @returns {string[]|null} An array of validation error messages, or null if no errors.
- */
-function validateBlogs(frontmatter) {
- const requiredAttributes = ['title', 'date', 'type', 'tags', 'cover', 'authors'];
- const errors = [];
-
- // Check for required attributes
- requiredAttributes.forEach(attr => {
- if (!frontmatter.hasOwnProperty(attr)) {
- errors.push(`${attr} is missing`);
- }
- });
-
- // Validate date format
- if (frontmatter.date && Number.isNaN(Date.parse(frontmatter.date))) {
- errors.push(`Invalid date format: ${frontmatter.date}`);
- }
-
- // Validate tags format (must be an array)
- if (frontmatter.tags && !Array.isArray(frontmatter.tags)) {
- errors.push(`Tags should be an array`);
- }
-
- // Validate cover is a string
- if (frontmatter.cover && typeof frontmatter.cover !== 'string') {
- errors.push(`Cover must be a string`);
- }
-
- // Validate authors (must be an array with valid attributes)
- if (frontmatter.authors) {
- if (!Array.isArray(frontmatter.authors)) {
- errors.push('Authors should be an array');
- } else {
- frontmatter.authors.forEach((author, index) => {
- if (!author.name) {
- errors.push(`Author at index ${index} is missing a name`);
- }
- if (author.link && !isValidURL(author.link)) {
- errors.push(`Invalid URL for author at index ${index}: ${author.link}`);
- }
- if (!author.photo) {
- errors.push(`Author at index ${index} is missing a photo`);
- }
- });
- }
- }
-
- return errors.length ? errors : null;
-}
-
-/**
- * Validates the frontmatter of a documentation file.
- * @param {object} frontmatter - The frontmatter object to validate.
- * @param {string} filePath - The path to the file being validated.
- * @returns {string[]|null} An array of validation error messages, or null if no errors.
- */
-function validateDocs(frontmatter) {
- const errors = [];
-
- // Check if title exists and is a string
- if (!frontmatter.title || typeof frontmatter.title !== 'string') {
- errors.push('Title is missing or not a string');
- }
-
- // Check if weight exists and is a number
- if (frontmatter.weight === undefined || typeof frontmatter.weight !== 'number') {
- errors.push('Weight is missing or not a number');
- }
-
- return errors.length ? errors : null;
-}
-
-/**
- * Recursively checks markdown files in a folder and validates their frontmatter.
- * @param {string} folderPath - The path to the folder to check.
- * @param {Function} validateFunction - The function used to validate the frontmatter.
- * @param {string} [relativePath=''] - The relative path of the folder for logging purposes.
- */
-function checkMarkdownFiles(folderPath, validateFunction, relativePath = '') {
- fs.readdir(folderPath, (err, files) => {
- if (err) {
- console.error('Error reading directory:', err);
- return;
- }
-
- files.forEach(file => {
- const filePath = path.join(folderPath, file);
- const relativeFilePath = path.join(relativePath, file);
-
- // Skip the folder 'docs/reference/specification'
- if (relativeFilePath.includes('reference/specification')) {
- return;
- }
-
- fs.stat(filePath, (err, stats) => {
- if (err) {
- console.error('Error reading file stats:', err);
- return;
- }
-
- // Recurse if directory, otherwise validate markdown file
- if (stats.isDirectory()) {
- checkMarkdownFiles(filePath, validateFunction, relativeFilePath);
- } else if (path.extname(file) === '.md') {
- const fileContent = fs.readFileSync(filePath, 'utf-8');
- const { data: frontmatter } = matter(fileContent);
-
- const errors = validateFunction(frontmatter);
- if (errors) {
- console.log(`Errors in file ${relativeFilePath}:`);
- errors.forEach(error => console.log(` - ${error}`));
- process.exitCode = 1;
- }
- }
- });
- });
- });
-}
-
-const docsFolderPath = path.resolve(__dirname, '../../markdown/docs');
-const blogsFolderPath = path.resolve(__dirname, '../../markdown/blog');
-
-checkMarkdownFiles(docsFolderPath, validateDocs);
-checkMarkdownFiles(blogsFolderPath, validateBlogs);
diff --git a/scripts/markdown/check-markdown.ts b/scripts/markdown/check-markdown.ts
new file mode 100644
index 000000000000..49fbc74b28f1
--- /dev/null
+++ b/scripts/markdown/check-markdown.ts
@@ -0,0 +1,146 @@
+import fs from 'fs';
+import matter from 'gray-matter';
+import path from 'path';
+
+/**
+ * Checks if a given string is a valid URL.
+ * @param {string} str - The string to validate as a URL.
+ * @returns {boolean} True if the string is a valid URL, false otherwise.
+ */
+function isValidURL(str) {
+ try {
+ new URL(str);
+ return true;
+ } catch (err) {
+ return false;
+ }
+}
+
+/**
+ * Validates the frontmatter of a blog post.
+ * @param {object} frontmatter - The frontmatter object to validate.
+ * @param {string} filePath - The path to the file being validated.
+ * @returns {string[]|null} An array of validation error messages, or null if no errors.
+ */
+function validateBlogs(frontmatter) {
+ const requiredAttributes = ['title', 'date', 'type', 'tags', 'cover', 'authors'];
+ const errors = [];
+
+ // Check for required attributes
+ requiredAttributes.forEach((attr) => {
+ if (!frontmatter.hasOwnProperty(attr)) {
+ errors.push(`${attr} is missing`);
+ }
+ });
+
+ // Validate date format
+ if (frontmatter.date && Number.isNaN(Date.parse(frontmatter.date))) {
+ errors.push(`Invalid date format: ${frontmatter.date}`);
+ }
+
+ // Validate tags format (must be an array)
+ if (frontmatter.tags && !Array.isArray(frontmatter.tags)) {
+ errors.push(`Tags should be an array`);
+ }
+
+ // Validate cover is a string
+ if (frontmatter.cover && typeof frontmatter.cover !== 'string') {
+ errors.push(`Cover must be a string`);
+ }
+
+ // Validate authors (must be an array with valid attributes)
+ if (frontmatter.authors) {
+ if (!Array.isArray(frontmatter.authors)) {
+ errors.push('Authors should be an array');
+ } else {
+ frontmatter.authors.forEach((author, index) => {
+ if (!author.name) {
+ errors.push(`Author at index ${index} is missing a name`);
+ }
+ if (author.link && !isValidURL(author.link)) {
+ errors.push(`Invalid URL for author at index ${index}: ${author.link}`);
+ }
+ if (!author.photo) {
+ errors.push(`Author at index ${index} is missing a photo`);
+ }
+ });
+ }
+ }
+
+ return errors.length ? errors : null;
+}
+
+/**
+ * Validates the frontmatter of a documentation file.
+ * @param {object} frontmatter - The frontmatter object to validate.
+ * @param {string} filePath - The path to the file being validated.
+ * @returns {string[]|null} An array of validation error messages, or null if no errors.
+ */
+function validateDocs(frontmatter) {
+ const errors = [];
+
+ // Check if title exists and is a string
+ if (!frontmatter.title || typeof frontmatter.title !== 'string') {
+ errors.push('Title is missing or not a string');
+ }
+
+ // Check if weight exists and is a number
+ if (frontmatter.weight === undefined || typeof frontmatter.weight !== 'number') {
+ errors.push('Weight is missing or not a number');
+ }
+
+ return errors.length ? errors : null;
+}
+
+/**
+ * Recursively checks markdown files in a folder and validates their frontmatter.
+ * @param {string} folderPath - The path to the folder to check.
+ * @param {Function} validateFunction - The function used to validate the frontmatter.
+ * @param {string} [relativePath=''] - The relative path of the folder for logging purposes.
+ */
+function checkMarkdownFiles(folderPath, validateFunction, relativePath = '') {
+ fs.readdir(folderPath, (err, files) => {
+ if (err) {
+ console.error('Error reading directory:', err);
+ return;
+ }
+
+ files.forEach((file) => {
+ const filePath = path.join(folderPath, file);
+ const relativeFilePath = path.join(relativePath, file);
+
+ // Skip the folder 'docs/reference/specification'
+ if (relativeFilePath.includes('reference/specification')) {
+ return;
+ }
+
+ fs.stat(filePath, (err, stats) => {
+ if (err) {
+ console.error('Error reading file stats:', err);
+ return;
+ }
+
+ // Recurse if directory, otherwise validate markdown file
+ if (stats.isDirectory()) {
+ checkMarkdownFiles(filePath, validateFunction, relativeFilePath);
+ } else if (path.extname(file) === '.md') {
+ const fileContent = fs.readFileSync(filePath, 'utf-8');
+ const { data: frontmatter } = matter(fileContent);
+
+ const errors = validateFunction(frontmatter);
+ if (errors) {
+ console.log(`Errors in file ${relativeFilePath}:`);
+ errors.forEach((error) => console.log(` - ${error}`));
+ process.exitCode = 1;
+ }
+ }
+ });
+ });
+ });
+}
+
+const docsFolderPath = path.resolve(__dirname, '../../markdown/docs');
+const blogsFolderPath = path.resolve(__dirname, '../../markdown/blog');
+
+checkMarkdownFiles(docsFolderPath, validateDocs);
+checkMarkdownFiles(blogsFolderPath, validateBlogs);
diff --git a/scripts/tools/categorylist.js b/scripts/tools/categorylist.js
deleted file mode 100644
index 11fcc3790e9e..000000000000
--- a/scripts/tools/categorylist.js
+++ /dev/null
@@ -1,100 +0,0 @@
-// Various categories to define the category in which a tool has to be listed
-const categoryList = [
- {
- name: "APIs",
- tag: "api",
- description: "The following is a list of APIs that expose functionality related to AsyncAPI."
- },
- {
- name: "Code-first tools",
- tag: "code-first",
- description: "The following is a list of tools that generate AsyncAPI documents from your code."
- },
- {
- name: "Code Generators",
- tag: "code-generator",
- description: "The following is a list of tools that generate code from an AsyncAPI document; not the other way around."
- },
- {
- name: "Converters",
- tag: "converter",
- description: "The following is a list of tools that do not yet belong to any specific category but are also useful for the community."
- },
- {
- name: "Directories",
- tag: "directory",
- description: "The following is a list of directories that index public AsyncAPI documents."
- },
- {
- name: "Documentation Generators",
- tag: "documentation-generator",
- description: "The following is a list of tools that generate human-readable documentation from an AsyncAPI document."
- },
- {
- name: "Editors",
- tag: "editor",
- description: "The following is a list of editors or related tools that allow editing of AsyncAPI document."
- },
- {
- name: "UI components",
- tag: "ui-component",
- description: "The following is a list of UI components to view AsyncAPI documents."
- },
- {
- name: "DSL",
- tag: "dsl",
- description: "Writing YAML by hand is no fun, and maybe you don't want a GUI, so use a Domain Specific Language to write AsyncAPI in your language of choice."
- },
- {
- name: "Frameworks",
- tag: "framework",
- description: "The following is a list of API/application frameworks that make use of AsyncAPI."
- },
- {
- name: "GitHub Actions",
- tag: "github-action",
- description: "The following is a list of GitHub Actions that you can use in your workflows"
- },
- {
- name: "Mocking and Testing",
- tag: "mocking-and-testing",
- description: "The tools below take specification documents as input, then publish fake messages to broker destinations for simulation purposes. They may also check that publisher messages are compliant with schemas."
- },
- {
- name: "Validators",
- tag: "validator",
- description: "The following is a list of tools that validate AsyncAPI documents."
- },
- {
- name: "Compare tools",
- tag: "compare-tool",
- description: "The following is a list of tools that compare AsyncAPI documents."
- },
- {
- name: "CLIs",
- tag: "cli",
- description: "The following is a list of tools that you can work with in terminal or do some CI/CD automation."
- },
- {
- name: "Bundlers",
- tag: "bundler",
- description: "The following is a list of tools that you can work with to bundle AsyncAPI documents."
- },
- {
- name: "IDE Extensions",
- tag: "ide-extension",
- description: "The following is a list of extensions for different IDEs like VSCode, IntelliJ IDEA and others"
- },
- {
- name: "AsyncAPI Generator Templates",
- tag: "generator-template",
- description: "The following is a list of templates compatible with AsyncAPI Generator. You can use them to generate apps, clients or documentation from your AsyncAPI documents."
- },
- {
- name: "Others",
- tag: "other",
- description: "The following is a list of tools that comes under Other category."
- }
-]
-
-module.exports = {categoryList}
diff --git a/scripts/tools/categorylist.ts b/scripts/tools/categorylist.ts
new file mode 100644
index 000000000000..3ba7f2583517
--- /dev/null
+++ b/scripts/tools/categorylist.ts
@@ -0,0 +1,106 @@
+// Various categories to define the category in which a tool has to be listed
+const categoryList = [
+ {
+ name: 'APIs',
+ tag: 'api',
+ description: 'The following is a list of APIs that expose functionality related to AsyncAPI.'
+ },
+ {
+ name: 'Code-first tools',
+ tag: 'code-first',
+ description: 'The following is a list of tools that generate AsyncAPI documents from your code.'
+ },
+ {
+ name: 'Code Generators',
+ tag: 'code-generator',
+ description:
+ 'The following is a list of tools that generate code from an AsyncAPI document; not the other way around.'
+ },
+ {
+ name: 'Converters',
+ tag: 'converter',
+ description:
+ 'The following is a list of tools that do not yet belong to any specific category but are also useful for the community.'
+ },
+ {
+ name: 'Directories',
+ tag: 'directory',
+ description: 'The following is a list of directories that index public AsyncAPI documents.'
+ },
+ {
+ name: 'Documentation Generators',
+ tag: 'documentation-generator',
+ description:
+ 'The following is a list of tools that generate human-readable documentation from an AsyncAPI document.'
+ },
+ {
+ name: 'Editors',
+ tag: 'editor',
+ description: 'The following is a list of editors or related tools that allow editing of AsyncAPI documents.'
+ },
+ {
+ name: 'UI components',
+ tag: 'ui-component',
+ description: 'The following is a list of UI components to view AsyncAPI documents.'
+ },
+ {
+ name: 'DSL',
+ tag: 'dsl',
+ description:
+ "Writing YAML by hand is no fun, and maybe you don't want a GUI, so use a Domain Specific Language to write AsyncAPI in your language of choice."
+ },
+ {
+ name: 'Frameworks',
+ tag: 'framework',
+ description: 'The following is a list of API/application frameworks that make use of AsyncAPI.'
+ },
+ {
+ name: 'GitHub Actions',
+ tag: 'github-action',
+ description: 'The following is a list of GitHub Actions that you can use in your workflows'
+ },
+ {
+ name: 'Mocking and Testing',
+ tag: 'mocking-and-testing',
+ description:
+ 'The tools below take specification documents as input, then publish fake messages to broker destinations for simulation purposes. They may also check that publisher messages are compliant with schemas.'
+ },
+ {
+ name: 'Validators',
+ tag: 'validator',
+ description: 'The following is a list of tools that validate AsyncAPI documents.'
+ },
+ {
+ name: 'Compare tools',
+ tag: 'compare-tool',
+ description: 'The following is a list of tools that compare AsyncAPI documents.'
+ },
+ {
+ name: 'CLIs',
+ tag: 'cli',
+ description: 'The following is a list of tools that you can work with in terminal or do some CI/CD automation.'
+ },
+ {
+ name: 'Bundlers',
+ tag: 'bundler',
+ description: 'The following is a list of tools that you can work with to bundle AsyncAPI documents.'
+ },
+ {
+ name: 'IDE Extensions',
+ tag: 'ide-extension',
+ description: 'The following is a list of extensions for different IDEs like VSCode, IntelliJ IDEA and others'
+ },
+ {
+ name: 'AsyncAPI Generator Templates',
+ tag: 'generator-template',
+ description:
+ 'The following is a list of templates compatible with AsyncAPI Generator. You can use them to generate apps, clients or documentation from your AsyncAPI documents.'
+ },
+ {
+ name: 'Others',
+ tag: 'other',
+ description: 'The following is a list of tools that comes under Other category.'
+ }
+];
+
+export { categoryList };
diff --git a/scripts/tools/combine-tools.js b/scripts/tools/combine-tools.js
deleted file mode 100644
index 1b1367b15ccb..000000000000
--- a/scripts/tools/combine-tools.js
+++ /dev/null
@@ -1,149 +0,0 @@
-const { languagesColor, technologiesColor } = require("./tags-color")
-const { categoryList } = require("./categorylist.js")
-const { createToolObject } = require("./tools-object")
-const fs = require('fs')
-const schema = require("./tools-schema.json");
-const Ajv = require("ajv")
-const addFormats = require("ajv-formats")
-const Fuse = require("fuse.js");
-const ajv = new Ajv()
-addFormats(ajv, ["uri"])
-const validate = ajv.compile(schema)
-
-let finalTools = {};
-for (var category of categoryList) {
- finalTools[category.name] = {
- description: category.description,
- toolsList: []
- };
-}
-
-// Config options set for the Fuse object
-const options = {
- includeScore: true,
- shouldSort: true,
- threshold: 0.39,
- keys: ['name', 'color', 'borderColor']
-}
-
-// Two seperate lists and Fuse objects initialised to search languages and technologies tags
-// from specified list of same.
-let languageList = [...languagesColor], technologyList = [...technologiesColor];
-let languageFuse = new Fuse(languageList, options), technologyFuse = new Fuse(technologyList, options)
-
-// takes individual tool object and inserts borderColor and backgroundColor of the tags of
-// languages and technologies, for Tool Card in website.
-const getFinalTool = async (toolObject) => {
- let finalObject = toolObject;
-
- //there might be a tool without language
- if (toolObject.filters.language) {
- let languageArray = []
- if (typeof toolObject.filters.language === 'string') {
- const languageSearch = await languageFuse.search(toolObject.filters.language)
- if (languageSearch.length) {
- languageArray.push(languageSearch[0].item);
- } else {
- // adds a new language object in the Fuse list as well as in tool object
- // so that it isn't missed out in the UI.
- let languageObject = {
- name: toolObject.filters.language,
- color: 'bg-[#57f281]',
- borderColor: 'border-[#37f069]'
- }
- languageList.push(languageObject);
- languageArray.push(languageObject)
- languageFuse = new Fuse(languageList, options)
- }
- } else {
- for (const language of toolObject?.filters?.language) {
- const languageSearch = await languageFuse.search(language)
- if (languageSearch.length > 0) {
- languageArray.push(languageSearch[0].item);
- }
- else {
- // adds a new language object in the Fuse list as well as in tool object
- // so that it isn't missed out in the UI.
- let languageObject = {
- name: language,
- color: 'bg-[#57f281]',
- borderColor: 'border-[#37f069]'
- }
- languageList.push(languageObject);
- languageArray.push(languageObject)
- languageFuse = new Fuse(languageList, options)
- }
- }
- }
- finalObject.filters.language = languageArray
- }
- let technologyArray = [];
- if (toolObject.filters.technology) {
- for (const technology of toolObject?.filters?.technology) {
- const technologySearch = await technologyFuse.search(technology)
- if (technologySearch.length > 0) {
- technologyArray.push(technologySearch[0].item);
- }
- else {
- // adds a new technology object in the Fuse list as well as in tool object
- // so that it isn't missed out in the UI.
- let technologyObject = {
- name: technology,
- color: 'bg-[#61d0f2]',
- borderColor: 'border-[#40ccf7]'
- }
- technologyList.push(technologyObject);
- technologyArray.push(technologyObject);
- technologyFuse = new Fuse(technologyList, options)
- }
- }
- }
- finalObject.filters.technology = technologyArray;
- return finalObject;
-}
-
-
-// Combine the automated tools and manual tools list into single JSON object file, and
-// lists down all the language and technology tags in one JSON file.
-const combineTools = async (automatedTools, manualTools, toolsPath, tagsPath) => {
- try {
- for (const key in automatedTools) {
- let finalToolsList = [];
- if (automatedTools[key].toolsList.length) {
- for (const tool of automatedTools[key].toolsList) {
- finalToolsList.push(await getFinalTool(tool))
- }
- }
- if (manualTools[key]?.toolsList?.length) {
- for (const tool of manualTools[key].toolsList) {
- let isAsyncAPIrepo;
- const isValid = await validate(tool)
- if (isValid) {
- if (tool?.links?.repoUrl) {
- const url = new URL(tool.links.repoUrl)
- isAsyncAPIrepo = url.href.startsWith("https://github.com/asyncapi/")
- } else isAsyncAPIrepo = false
- let toolObject = await createToolObject(tool, "", "", isAsyncAPIrepo)
- finalToolsList.push(await getFinalTool(toolObject))
- } else {
- console.error({
- message: 'Tool validation failed',
- tool: tool.title,
- source: 'manual-tools.json',
- errors: validate.errors,
- note: 'Script continues execution, error logged for investigation'
- });
- }
- }
- }
- finalToolsList.sort((tool, anotherTool) => tool.title.localeCompare(anotherTool.title));
- finalTools[key].toolsList = finalToolsList
- }
- fs.writeFileSync(toolsPath, JSON.stringify(finalTools));
- fs.writeFileSync(tagsPath, JSON.stringify({ languages: languageList, technologies: technologyList }),)
- } catch (err) {
- throw new Error(`Error combining tools: ${err}`);
- }
-}
-
-module.exports = { combineTools }
diff --git a/scripts/tools/combine-tools.ts b/scripts/tools/combine-tools.ts
new file mode 100644
index 000000000000..974dcf0201a3
--- /dev/null
+++ b/scripts/tools/combine-tools.ts
@@ -0,0 +1,165 @@
+import Ajv from 'ajv';
+import addFormats from 'ajv-formats';
+import fs from 'fs';
+import Fuse from 'fuse.js';
+
+import { categoryList } from './categorylist.js';
+import { languagesColor, technologiesColor } from './tags-color';
+import { createToolObject } from './tools-object';
+import schema from './tools-schema.json';
+
+const ajv = new Ajv();
+
+addFormats(ajv, ['uri']);
+const validate = ajv.compile(schema);
+
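+// Initialise an empty tools bucket for every category; combineTools fills these lists below.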
+const finalTools = {};
+
+for (const category of categoryList) {
+ finalTools[category.name] = {
+ description: category.description,
+ toolsList: []
+ };
+}
+
+// Config options set for the Fuse object
+const options = {
+ includeScore: true,
+ shouldSort: true,
+ threshold: 0.39,
+ keys: ['name', 'color', 'borderColor']
+};
+
+// Two separate lists and Fuse objects are initialised to search language and technology tags
+// within their respective lists.
+const languageList = [...languagesColor];
+const technologyList = [...technologiesColor];
+let languageFuse = new Fuse(languageList, options);
+let technologyFuse = new Fuse(technologyList, options);
+
+// Takes an individual tool object and inserts the borderColor and backgroundColor of its language
+// and technology tags, as required by the Tool Card on the website.
+const getFinalTool = async (toolObject) => {
+ const finalObject = toolObject;
+
+ // there might be a tool without language
+ if (toolObject.filters.language) {
+ const languageArray = [];
+
+ if (typeof toolObject.filters.language === 'string') {
+ const languageSearch = await languageFuse.search(toolObject.filters.language);
+
+ if (languageSearch.length) {
+ languageArray.push(languageSearch[0].item);
+ } else {
+ // adds a new language object in the Fuse list as well as in tool object
+ // so that it isn't missed out in the UI.
+ const languageObject = {
+ name: toolObject.filters.language,
+ color: 'bg-[#57f281]',
+ borderColor: 'border-[#37f069]'
+ };
+
+ languageList.push(languageObject);
+ languageArray.push(languageObject);
+ languageFuse = new Fuse(languageList, options);
+ }
+ } else {
+ for (const language of toolObject?.filters?.language) {
+ const languageSearch = await languageFuse.search(language);
+
+ if (languageSearch.length > 0) {
+ languageArray.push(languageSearch[0].item);
+ } else {
+ // adds a new language object in the Fuse list as well as in tool object
+ // so that it isn't missed out in the UI.
+ const languageObject = {
+ name: language,
+ color: 'bg-[#57f281]',
+ borderColor: 'border-[#37f069]'
+ };
+
+ languageList.push(languageObject);
+ languageArray.push(languageObject);
+ languageFuse = new Fuse(languageList, options);
+ }
+ }
+ }
+ finalObject.filters.language = languageArray;
+ }
+ const technologyArray = [];
+
+ if (toolObject.filters.technology) {
+ for (const technology of toolObject?.filters?.technology) {
+ const technologySearch = await technologyFuse.search(technology);
+
+ if (technologySearch.length > 0) {
+ technologyArray.push(technologySearch[0].item);
+ } else {
+ // adds a new technology object in the Fuse list as well as in tool object
+ // so that it isn't missed out in the UI.
+ const technologyObject = {
+ name: technology,
+ color: 'bg-[#61d0f2]',
+ borderColor: 'border-[#40ccf7]'
+ };
+
+ technologyList.push(technologyObject);
+ technologyArray.push(technologyObject);
+ technologyFuse = new Fuse(technologyList, options);
+ }
+ }
+ }
+ finalObject.filters.technology = technologyArray;
+
+ return finalObject;
+};
+
+// Combines the automated and manual tools lists into a single JSON object file, and
+// writes all the language and technology tags to another JSON file.
+const combineTools = async (automatedTools, manualTools, toolsPath, tagsPath) => {
+ try {
+ for (const key in automatedTools) {
+ const finalToolsList = [];
+
+ if (automatedTools[key].toolsList.length) {
+ for (const tool of automatedTools[key].toolsList) {
+ finalToolsList.push(await getFinalTool(tool));
+ }
+ }
+ if (manualTools[key]?.toolsList?.length) {
+ for (const tool of manualTools[key].toolsList) {
+ let isAsyncAPIrepo;
+ const isValid = await validate(tool);
+
+ if (isValid) {
+ if (tool?.links?.repoUrl) {
+ const url = new URL(tool.links.repoUrl);
+
+ isAsyncAPIrepo = url.href.startsWith('https://github.com/asyncapi/');
+ } else isAsyncAPIrepo = false;
+ const toolObject = await createToolObject(tool, '', '', isAsyncAPIrepo);
+
+ finalToolsList.push(await getFinalTool(toolObject));
+ } else {
+ console.error({
+ message: 'Tool validation failed',
+ tool: tool.title,
+ source: 'manual-tools.json',
+ errors: validate.errors,
+ note: 'Script continues execution, error logged for investigation'
+ });
+ }
+ }
+ }
+ finalToolsList.sort((tool, anotherTool) => tool.title.localeCompare(anotherTool.title));
+ finalTools[key].toolsList = finalToolsList;
+ }
+ fs.writeFileSync(toolsPath, JSON.stringify(finalTools));
+ fs.writeFileSync(tagsPath, JSON.stringify({ languages: languageList, technologies: technologyList }));
+ } catch (err) {
+ throw new Error(`Error combining tools: ${err}`);
+ }
+};
+
+export { combineTools };
diff --git a/scripts/tools/extract-tools-github.js b/scripts/tools/extract-tools-github.js
deleted file mode 100644
index 55e96124b752..000000000000
--- a/scripts/tools/extract-tools-github.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const axios = require('axios');
-require('dotenv').config()
-
-const getData = async () => {
- try {
- const result = await axios.get(
- 'https://api.github.com/search/code?q=filename:.asyncapi-tool',
- {
- headers: {
- accept: 'application/vnd.github.text-match+json',
- authorization: `token ${process.env.GITHUB_TOKEN}`,
- },
- }
- );
-
- return result.data;
- } catch (err) {
- throw err;
- }
-};
-
-module.exports = { getData };
\ No newline at end of file
diff --git a/scripts/tools/extract-tools-github.ts b/scripts/tools/extract-tools-github.ts
new file mode 100644
index 000000000000..7b9831dfd67c
--- /dev/null
+++ b/scripts/tools/extract-tools-github.ts
@@ -0,0 +1,21 @@
+import axios from 'axios';
+import dotenv from 'dotenv';
+
+dotenv.config();
+
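+// Searches GitHub for repositories that contain a .asyncapi-tool file.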
+const getData = async () => {
+ try {
+ const result = await axios.get('https://api.github.com/search/code?q=filename:.asyncapi-tool', {
+ headers: {
+ accept: 'application/vnd.github.text-match+json',
+ authorization: `token ${process.env.GITHUB_TOKEN}`
+ }
+ });
+
+ return result.data;
+ } catch (err) {
+ throw err;
+ }
+};
+
+export { getData };
diff --git a/scripts/tools/tags-color.js b/scripts/tools/tags-color.js
deleted file mode 100644
index 9a18ca2058d5..000000000000
--- a/scripts/tools/tags-color.js
+++ /dev/null
@@ -1,178 +0,0 @@
-// Language and Technology tags along with their colors in UI are defined here.
-const languagesColor = [
- {
- name: "Go/Golang",
- color: "bg-[#8ECFDF]",
- borderColor: "border-[#00AFD9]"
- },
- {
- name: "Java",
- color: "bg-[#ECA2A4]",
- borderColor: "border-[#EC2125]"
- },
- {
- name: "JavaScript",
- color: "bg-[#F2F1C7]",
- borderColor: "border-[#BFBE86]"
- },
- {
- name: "HTML",
- color: "bg-[#E2A291]",
- borderColor: "border-[#E44D26]"
- },
- {
- name: "C/C++",
- color: "bg-[#93CDEF]",
- borderColor: "border-[#0080CC]"
- },
- {
- name: "C#",
- color: "bg-[#E3AFE0]",
- borderColor: "border-[#9B4F96]"
- },
- {
- name: "Python",
- color: "bg-[#A8D0EF]",
- borderColor: "border-[#3878AB]"
- },
- {
- name: "TypeScript",
- color: "bg-[#7DBCFE]",
- borderColor: "border-[#2C78C7]"
- },
- {
- name: "Kotlin",
- color: "bg-[#B1ACDF]",
- borderColor: "border-[#756BD9]"
- },
- {
- name: "Scala",
- color: "bg-[#FFA299]",
- borderColor: "border-[#DF301F]"
- },
- {
- name: "Markdown",
- color: "bg-[#BABEBF]",
- borderColor: "border-[#445B64]"
- },
- {
- name: "YAML",
- color: "bg-[#FFB764]",
- borderColor: "border-[#F1901F]"
- },
- {
- name: "R",
- color: "bg-[#84B5ED]",
- borderColor: "border-[#246BBE]"
- },
- {
- name: "Ruby",
- color: "bg-[#FF8289]",
- borderColor: "border-[#FF000F]"
- },
- {
- name: "Rust",
- color: "bg-[#FFB8AA]",
- borderColor: "border-[#E43716]"
- },
- {
- name: "Shell",
- color: "bg-[#87D4FF]",
- borderColor: "border-[#389ED7]"
- },
- {
- name: "Groovy",
- color: "bg-[#B6D5E5]",
- borderColor: "border-[#609DBC]"
- }
-]
-
-const technologiesColor = [
- {
- name: "Node.js",
- color: "bg-[#BDFF67]",
- borderColor: "border-[#84CE24]"
- },
- {
- name: "Hermes",
- color: "bg-[#8AEEBD]",
- borderColor: "border-[#2AB672]"
- },
- {
- name: "React JS",
- color: "bg-[#9FECFA]",
- borderColor: "border-[#08D8FE]"
- },
- {
- name: ".NET",
- color: "bg-[#A184FF]",
- borderColor: "border-[#5026D4]"
- },
- {
- name: "ASP.NET",
- color: "bg-[#71C2FB]",
- borderColor: "border-[#1577BC]"
- },
- {
- name: "Springboot",
- color: "bg-[#98E279]",
- borderColor: "border-[#68BC44]"
- },
- {
- name: "AWS",
- color: "bg-[#FF9F59]",
- borderColor: "border-[#EF6703]"
- },
- {
- name: "Docker",
- color: "bg-[#B8E0FF]",
- borderColor: "border-[#2596ED]"
- },
- {
- name: "Node-RED",
- color: "bg-[#FF7474]",
- borderColor: "border-[#8F0101]"
- },
- {
- name: "Maven",
- color: "bg-[#FF6B80]",
- borderColor: "border-[#CA1A33]"
- },
- {
- name: "Saas",
- color: "bg-[#6AB8EC]",
- borderColor: "border-[#2275AD]"
- },
- {
- name: "Kubernetes-native",
- color: "bg-[#D7C7F2]",
- borderColor: "border-[#A387D2]"
- },
- {
- name: "Scala",
- color: "bg-[#D7C7F2]",
- borderColor: "border-[#A387D2]"
- },
- {
- name: "Azure",
- color: "bg-[#4B93FF]",
- borderColor: "border-[#015ADF]"
- },
- {
- name: "Jenkins",
- color: "bg-[#D7C7F2]",
- borderColor: "border-[#A387D2]"
- },
- {
- name: "Flask",
- color: "bg-[#D7C7F2]",
- borderColor: "border-[#A387D2]"
- },
- {
- name: "Nest Js",
- color: "bg-[#E1224E]",
- borderColor: "border-[#B9012b]"
- }
-]
-
-module.exports = {languagesColor, technologiesColor}
\ No newline at end of file
diff --git a/scripts/tools/tags-color.ts b/scripts/tools/tags-color.ts
new file mode 100644
index 000000000000..d6a905060988
--- /dev/null
+++ b/scripts/tools/tags-color.ts
@@ -0,0 +1,178 @@
+// Language and Technology tags along with their colors in UI are defined here.
+const languagesColor = [
+ {
+ name: 'Go/Golang',
+ color: 'bg-[#8ECFDF]',
+ borderColor: 'border-[#00AFD9]'
+ },
+ {
+ name: 'Java',
+ color: 'bg-[#ECA2A4]',
+ borderColor: 'border-[#EC2125]'
+ },
+ {
+ name: 'JavaScript',
+ color: 'bg-[#F2F1C7]',
+ borderColor: 'border-[#BFBE86]'
+ },
+ {
+ name: 'HTML',
+ color: 'bg-[#E2A291]',
+ borderColor: 'border-[#E44D26]'
+ },
+ {
+ name: 'C/C++',
+ color: 'bg-[#93CDEF]',
+ borderColor: 'border-[#0080CC]'
+ },
+ {
+ name: 'C#',
+ color: 'bg-[#E3AFE0]',
+ borderColor: 'border-[#9B4F96]'
+ },
+ {
+ name: 'Python',
+ color: 'bg-[#A8D0EF]',
+ borderColor: 'border-[#3878AB]'
+ },
+ {
+ name: 'TypeScript',
+ color: 'bg-[#7DBCFE]',
+ borderColor: 'border-[#2C78C7]'
+ },
+ {
+ name: 'Kotlin',
+ color: 'bg-[#B1ACDF]',
+ borderColor: 'border-[#756BD9]'
+ },
+ {
+ name: 'Scala',
+ color: 'bg-[#FFA299]',
+ borderColor: 'border-[#DF301F]'
+ },
+ {
+ name: 'Markdown',
+ color: 'bg-[#BABEBF]',
+ borderColor: 'border-[#445B64]'
+ },
+ {
+ name: 'YAML',
+ color: 'bg-[#FFB764]',
+ borderColor: 'border-[#F1901F]'
+ },
+ {
+ name: 'R',
+ color: 'bg-[#84B5ED]',
+ borderColor: 'border-[#246BBE]'
+ },
+ {
+ name: 'Ruby',
+ color: 'bg-[#FF8289]',
+ borderColor: 'border-[#FF000F]'
+ },
+ {
+ name: 'Rust',
+ color: 'bg-[#FFB8AA]',
+ borderColor: 'border-[#E43716]'
+ },
+ {
+ name: 'Shell',
+ color: 'bg-[#87D4FF]',
+ borderColor: 'border-[#389ED7]'
+ },
+ {
+ name: 'Groovy',
+ color: 'bg-[#B6D5E5]',
+ borderColor: 'border-[#609DBC]'
+ }
+];
+
+const technologiesColor = [
+ {
+ name: 'Node.js',
+ color: 'bg-[#BDFF67]',
+ borderColor: 'border-[#84CE24]'
+ },
+ {
+ name: 'Hermes',
+ color: 'bg-[#8AEEBD]',
+ borderColor: 'border-[#2AB672]'
+ },
+ {
+ name: 'React JS',
+ color: 'bg-[#9FECFA]',
+ borderColor: 'border-[#08D8FE]'
+ },
+ {
+ name: '.NET',
+ color: 'bg-[#A184FF]',
+ borderColor: 'border-[#5026D4]'
+ },
+ {
+ name: 'ASP.NET',
+ color: 'bg-[#71C2FB]',
+ borderColor: 'border-[#1577BC]'
+ },
+ {
+ name: 'Springboot',
+ color: 'bg-[#98E279]',
+ borderColor: 'border-[#68BC44]'
+ },
+ {
+ name: 'AWS',
+ color: 'bg-[#FF9F59]',
+ borderColor: 'border-[#EF6703]'
+ },
+ {
+ name: 'Docker',
+ color: 'bg-[#B8E0FF]',
+ borderColor: 'border-[#2596ED]'
+ },
+ {
+ name: 'Node-RED',
+ color: 'bg-[#FF7474]',
+ borderColor: 'border-[#8F0101]'
+ },
+ {
+ name: 'Maven',
+ color: 'bg-[#FF6B80]',
+ borderColor: 'border-[#CA1A33]'
+ },
+ {
+ name: 'Saas',
+ color: 'bg-[#6AB8EC]',
+ borderColor: 'border-[#2275AD]'
+ },
+ {
+ name: 'Kubernetes-native',
+ color: 'bg-[#D7C7F2]',
+ borderColor: 'border-[#A387D2]'
+ },
+ {
+ name: 'Scala',
+ color: 'bg-[#D7C7F2]',
+ borderColor: 'border-[#A387D2]'
+ },
+ {
+ name: 'Azure',
+ color: 'bg-[#4B93FF]',
+ borderColor: 'border-[#015ADF]'
+ },
+ {
+ name: 'Jenkins',
+ color: 'bg-[#D7C7F2]',
+ borderColor: 'border-[#A387D2]'
+ },
+ {
+ name: 'Flask',
+ color: 'bg-[#D7C7F2]',
+ borderColor: 'border-[#A387D2]'
+ },
+ {
+ name: 'Nest Js',
+ color: 'bg-[#E1224E]',
+ borderColor: 'border-[#B9012b]'
+ }
+];
+
+export { languagesColor, technologiesColor };
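+
+// Usage sketch (illustrative only — the lookup helper and the grey fallback colours below
+// are assumptions, not part of this module):
+// const getTagColor = (tag: string) =>
+//   [...languagesColor, ...technologiesColor].find((entry) => entry.name === tag) ?? {
+//     name: tag,
+//     color: 'bg-[#EBEBEB]',
+//     borderColor: 'border-[#A9A9A9]'
+//   };
+// getTagColor('TypeScript'); // -> { name: 'TypeScript', color: 'bg-[#7DBCFE]', borderColor: 'border-[#2C78C7]' }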
diff --git a/scripts/tools/tools-object.js b/scripts/tools/tools-object.js
deleted file mode 100644
index 1d8c73f8074b..000000000000
--- a/scripts/tools/tools-object.js
+++ /dev/null
@@ -1,116 +0,0 @@
-const schema = require("./tools-schema.json");
-const axios = require('axios')
-const Ajv = require("ajv")
-const addFormats = require("ajv-formats")
-const Fuse = require("fuse.js")
-const { categoryList } = require("./categorylist")
-const ajv = new Ajv()
-addFormats(ajv, ["uri"])
-const validate = ajv.compile(schema)
-const { convertToJson } = require('../utils');
-
-// Config options set for the Fuse object
-const options = {
- includeScore: true,
- shouldSort: true,
- threshold: 0.4,
- keys: ["tag"]
-}
-
-const fuse = new Fuse(categoryList, options)
-
-// using the contents of each toolFile (extracted from Github), along with Github URL
-// (repositoryUrl) of the tool, it's repository description (repoDescription) and
-// isAsyncAPIrepo boolean variable to define whether the tool repository is under
-// AsyncAPI organization or not, to create a JSON tool object as required in the frontend
-// side to show ToolCard.
-const createToolObject = async (toolFile, repositoryUrl = '', repoDescription = '', isAsyncAPIrepo = '') => {
- const resultantObject = {
- title: toolFile.title,
- description: toolFile?.description ? toolFile.description : repoDescription,
- links: {
- ...toolFile.links,
- repoUrl: toolFile?.links?.repoUrl ? toolFile.links.repoUrl : repositoryUrl
- },
- filters: {
- ...toolFile.filters,
- hasCommercial: toolFile?.filters?.hasCommercial ? toolFile.filters.hasCommercial : false,
- isAsyncAPIOwner: isAsyncAPIrepo
- }
- };
- return resultantObject;
-};
-
-// Each result obtained from the Github API call will be tested and verified
-// using the defined JSON schema, categorising each tool inside their defined categories
-// and creating a JSON tool object in which all the tools are listed in defined
-// categories order, which is then updated in `automated-tools.json` file.
-async function convertTools(data) {
- try {
- let finalToolsObject = {};
- const dataArray = data.items;
-
- // initialising finalToolsObject with all categories inside it with proper elements in each category
- finalToolsObject = Object.fromEntries(
- categoryList.map((category) => [
- category.name,
- {
- description: category.description,
- toolsList: []
- }
- ])
- );
-
- await Promise.all(dataArray.map(async (tool) => {
- try {
- if (tool.name.startsWith('.asyncapi-tool')) {
- const referenceId = tool.url.split('=')[1];
- const downloadUrl = `https://raw.githubusercontent.com/${tool.repository.full_name}/${referenceId}/${tool.path}`;
-
- const { data: toolFileContent } = await axios.get(downloadUrl);
-
- //some stuff can be YAML
- const jsonToolFileContent = await convertToJson(toolFileContent)
-
- //validating against JSON Schema for tools file
- const isValid = await validate(jsonToolFileContent)
-
- if (isValid) {
- const repositoryUrl = tool.repository.html_url;
- const repoDescription = tool.repository.description;
- const isAsyncAPIrepo = tool.repository.owner.login === 'asyncapi';
- const toolObject = await createToolObject(
- jsonToolFileContent,
- repositoryUrl,
- repoDescription,
- isAsyncAPIrepo
- );
-
- // Tool Object is appended to each category array according to Fuse search for categories inside Tool Object
- await Promise.all(jsonToolFileContent.filters.categories.map(async (category) => {
- const categorySearch = await fuse.search(category);
- const targetCategory = categorySearch.length ? categorySearch[0].item.name : 'Others';
- const { toolsList } = finalToolsObject[targetCategory];
- if (!toolsList.includes(toolObject)) {
- toolsList.push(toolObject);
- }
- }));
- } else {
- console.error('Script is not failing, it is just dropping errors for further investigation');
- console.error('Invalid .asyncapi-tool file.');
- console.error(`Located in: ${tool.html_url}`);
- console.error('Validation errors:', JSON.stringify(validate.errors, null, 2));
- }
- }
- } catch (err) {
- console.error(err)
- throw err;
- }
- }))
- return finalToolsObject;
- } catch (err) {
- throw new Error(`Error processing tool: ${err.message}`)
- }
-}
-
-module.exports = { convertTools, createToolObject }
diff --git a/scripts/tools/tools-object.ts b/scripts/tools/tools-object.ts
new file mode 100644
index 000000000000..4b15f8b9af6e
--- /dev/null
+++ b/scripts/tools/tools-object.ts
@@ -0,0 +1,126 @@
+import Ajv from 'ajv';
+import addFormats from 'ajv-formats';
+import axios from 'axios';
+import Fuse from 'fuse.js';
+
+import { convertToJson } from '../utils';
+import { categoryList } from './categorylist';
+import schema from './tools-schema.json';
+
+const ajv = new Ajv();
+
+addFormats(ajv, ['uri']);
+const validate = ajv.compile(schema);
+
+// Config options set for the Fuse object
+const options = {
+ includeScore: true,
+ shouldSort: true,
+ threshold: 0.4,
+ keys: ['tag']
+};
+
+const fuse = new Fuse(categoryList, options);
+
+// Uses the contents of each toolFile (extracted from GitHub), along with the GitHub URL
+// (repositoryUrl) of the tool, its repository description (repoDescription) and the
+// isAsyncAPIrepo boolean (whether the tool repository lives under the AsyncAPI
+// organization) to create the JSON tool object that the frontend needs to render a
+// ToolCard.
+const createToolObject = async (toolFile, repositoryUrl = '', repoDescription = '', isAsyncAPIrepo = false) => {
+ const resultantObject = {
+ title: toolFile.title,
+ description: toolFile?.description ? toolFile.description : repoDescription,
+ links: {
+ ...toolFile.links,
+ repoUrl: toolFile?.links?.repoUrl ? toolFile.links.repoUrl : repositoryUrl
+ },
+ filters: {
+ ...toolFile.filters,
+ hasCommercial: toolFile?.filters?.hasCommercial ? toolFile.filters.hasCommercial : false,
+ isAsyncAPIOwner: isAsyncAPIrepo
+ }
+ };
+
+ return resultantObject;
+};
+
+// Each result obtained from the GitHub API call is validated against the defined JSON
+// schema, each tool is sorted into its defined categories, and a JSON tool object is
+// built in which all tools are listed in the defined category order. That object is
+// then written to the `automated-tools.json` file.
+async function convertTools(data) {
+ try {
+ let finalToolsObject = {};
+ const dataArray = data.items;
+
+    // initialising finalToolsObject with every category, each entry holding its description and an empty toolsList
+ finalToolsObject = Object.fromEntries(
+ categoryList.map((category) => [
+ category.name,
+ {
+ description: category.description,
+ toolsList: []
+ }
+ ])
+ );
+
+ await Promise.all(
+ dataArray.map(async (tool) => {
+ try {
+ if (tool.name.startsWith('.asyncapi-tool')) {
+ const referenceId = tool.url.split('=')[1];
+ const downloadUrl = `https://raw.githubusercontent.com/${tool.repository.full_name}/${referenceId}/${tool.path}`;
+
+ const { data: toolFileContent } = await axios.get(downloadUrl);
+
+          // the tool file may be YAML rather than JSON
+ const jsonToolFileContent = await convertToJson(toolFileContent);
+
+ // validating against JSON Schema for tools file
+ const isValid = await validate(jsonToolFileContent);
+
+ if (isValid) {
+ const repositoryUrl = tool.repository.html_url;
+ const repoDescription = tool.repository.description;
+ const isAsyncAPIrepo = tool.repository.owner.login === 'asyncapi';
+ const toolObject = await createToolObject(
+ jsonToolFileContent,
+ repositoryUrl,
+ repoDescription,
+ isAsyncAPIrepo
+ );
+
+                // The tool object is appended to each matching category array, based on a Fuse search over the categories listed in the tool object
+ await Promise.all(
+ jsonToolFileContent.filters.categories.map(async (category) => {
+ const categorySearch = await fuse.search(category);
+ const targetCategory = categorySearch.length ? categorySearch[0].item.name : 'Others';
+ const { toolsList } = finalToolsObject[targetCategory];
+
+ if (!toolsList.includes(toolObject)) {
+ toolsList.push(toolObject);
+ }
+ })
+ );
+ } else {
+            console.error('Script is not failing; it is only logging errors for further investigation');
+ console.error('Invalid .asyncapi-tool file.');
+ console.error(`Located in: ${tool.html_url}`);
+ console.error('Validation errors:', JSON.stringify(validate.errors, null, 2));
+ }
+ }
+ } catch (err) {
+ console.error(err);
+ throw err;
+ }
+ })
+ );
+
+ return finalToolsObject;
+ } catch (err) {
+ throw new Error(`Error processing tool: ${err.message}`);
+ }
+}
+
+export { convertTools, createToolObject };
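+
+// Usage sketch (illustrative only — the object below only mirrors the fields this script
+// reads from the GitHub code-search response; the repository values are assumptions, not fixtures):
+// const mockSearchResult = {
+//   items: [
+//     {
+//       name: '.asyncapi-tool',
+//       url: 'https://api.github.com/repositories/123/contents/.asyncapi-tool?ref=abc123',
+//       path: '.asyncapi-tool',
+//       html_url: 'https://github.com/asyncapi/example-tool/blob/abc123/.asyncapi-tool',
+//       repository: {
+//         full_name: 'asyncapi/example-tool',
+//         html_url: 'https://github.com/asyncapi/example-tool',
+//         description: 'Example tool repository',
+//         owner: { login: 'asyncapi' }
+//       }
+//     }
+//   ]
+// };
+// const toolsByCategory = await convertTools(mockSearchResult);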
diff --git a/scripts/tools/tools-schema.json b/scripts/tools/tools-schema.json
index e11968a1b2e1..74bcb3d783b4 100644
--- a/scripts/tools/tools-schema.json
+++ b/scripts/tools/tools-schema.json
@@ -1,220 +1,209 @@
{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "title": "JSON Schema for AsyncAPI tool discovery file.",
- "type": "object",
- "additionalProperties": false,
- "required": [
- "title",
- "filters"
- ],
- "properties": {
- "title": {
- "type": "string",
- "description": "Human-readable name of the tool that will be visible to people in the list of tools.",
- "examples": [
- "AsyncAPI Generator",
- "Cupid"
- ]
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "JSON Schema for AsyncAPI tool discovery file.",
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["title", "filters"],
+ "properties": {
+ "title": {
+ "type": "string",
+ "description": "Human-readable name of the tool that will be visible to people in the list of tools.",
+ "examples": ["AsyncAPI Generator", "Cupid"]
+ },
+ "description": {
+ "type": "string",
+ "description": "By default scripts read description of repository there project is stored. You can override this behaviour by providing custom description."
+ },
+ "links": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "websiteUrl": {
+ "type": "string",
+ "description": "You can provide URL to the website where your project hosts some demo or project landing page.",
+ "format": "uri"
},
- "description": {
- "type": "string",
- "description": "By default scripts read description of repository there project is stored. You can override this behaviour by providing custom description."
+ "docsUrl": {
+ "type": "string",
+ "description": "You can provide URL to project documentation in case you have more than just a readme file.",
+ "format": "uri"
},
- "links": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "websiteUrl": {
- "type": "string",
- "description": "You can provide URL to the website where your project hosts some demo or project landing page.",
- "format": "uri"
- },
- "docsUrl": {
- "type": "string",
- "description": "You can provide URL to project documentation in case you have more than just a readme file.",
- "format": "uri"
+ "repoUrl": {
+ "type": "string",
+ "description": "You can provide URL to project codebase in case you have more than one tool present inside single repository.",
+ "format": "uri"
+ }
+ }
+ },
+ "filters": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["categories"],
+ "properties": {
+ "language": {
+ "description": "The language referred to is the runtime language selected by the user, not the generator or library language. For example, the Generator written in JavaScript generates Python code from the JavaScript template and the result of generation is a Python app, so the language for Generator is specified as Python. But for the Bundler library, users need to know if it can be integrated into their TypeScript codebase, so its language is specified as TypeScript. If some language in the schema's enum is omitted, it can be added through a pull request to the AsyncAPI website repository.",
+ "anyOf": [
+ {
+ "type": "string",
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "Go",
+ "Java",
+ "JavaScript",
+ "HTML",
+ "C/C++",
+ "C#",
+ "Python",
+ "TypeScript",
+ "Kotlin",
+ "Scala",
+ "Markdown",
+ "YAML",
+ "R",
+ "Ruby",
+ "Rust",
+ "Shell",
+ "Groovy"
+ ]
},
- "repoUrl": {
- "type": "string",
- "description": "You can provide URL to project codebase in case you have more than one tool present inside single repository.",
- "format": "uri"
+ {
+ "type": "string"
}
- }
- },
- "filters": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "categories"
- ],
- "properties": {
- "language": {
- "description": "The language referred to is the runtime language selected by the user, not the generator or library language. For example, the Generator written in JavaScript generates Python code from the JavaScript template and the result of generation is a Python app, so the language for Generator is specified as Python. But for the Bundler library, users need to know if it can be integrated into their TypeScript codebase, so its language is specified as TypeScript. If some language in the schema's enum is omitted, it can be added through a pull request to the AsyncAPI website repository.",
- "anyOf": [
- {
- "type": "string",
- "anyOf": [
- {
- "type": "string",
- "enum": [
- "Go",
- "Java",
- "JavaScript",
- "HTML",
- "C/C++",
- "C#",
- "Python",
- "TypeScript",
- "Kotlin",
- "Scala",
- "Markdown",
- "YAML",
- "R",
- "Ruby",
- "Rust",
- "Shell",
- "Groovy"
- ]
- },
- {
- "type": "string"
- }
- ]
- },
- {
- "type": "array",
- "items": {
- "type": "string",
- "anyOf": [
- {
- "type": "string",
- "enum": [
- "Go",
- "Java",
- "JavaScript",
- "HTML",
- "C/C++",
- "C#",
- "Python",
- "TypeScript",
- "Kotlin",
- "Scala",
- "Markdown",
- "YAML",
- "R",
- "Ruby",
- "Rust",
- "Shell",
- "Groovy"
- ]
- },
- {
- "type": "string"
- }
- ]
- }
- }
- ]
- },
- "technology": {
- "type": "array",
- "description": "Provide a list of different technologies used in the tool. Put details useful for tool user and tool contributor.",
- "items": {
- "type": "string",
- "anyOf": [
- {
- "type": "string",
- "enum": [
- "Node js",
- "Hermes",
- "React JS",
- ".NET",
- "ASP.NET",
- "Springboot",
- "AWS",
- "Docker",
- "Node-red",
- "Maven",
- "Saas",
- "Kubernetes-native",
- "Scala",
- "Azure",
- "Jenkins",
- "Flask"
- ]
- },
- {
- "type": "string"
- }
- ]
- },
- "examples": [
- "Express.js",
- "Kafka"
- ]
- },
- "categories": {
- "type": "array",
- "description": "Categories are used to group tools by different use case, like documentation or code generation. If have a list of fixed categories. If you use different one that your tool lands under \"other\" category. Feel free to add your category through a pull request to AsyncAPI website repository.",
- "items": {
- "type": "string",
- "anyOf": [
- {
- "type": "string",
- "enum": [
- "api",
- "code-first",
- "code-generator",
- "converter",
- "directory",
- "documentation-generator",
- "editor",
- "ui-component",
- "dsl",
- "framework",
- "github-action",
- "mocking-and-testing",
- "validator",
- "compare-tool",
- "other",
- "cli",
- "bundler",
- "ide-extension"
- ]
- },
- {
- "type": "string"
- }
- ]
- },
- "minItems": 1,
- "examples": [
- "api",
- "code-first",
- "code-generator",
- "converter",
- "directory",
- "documentation-generator",
- "editor",
- "ui-component",
- "dsl",
- "framework",
- "github-action",
- "mocking-and-testing",
- "validator",
- "compare-tool",
- "other",
- "cli",
- "bundler",
- "ide-extension"
+ ]
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "Go",
+ "Java",
+ "JavaScript",
+ "HTML",
+ "C/C++",
+ "C#",
+ "Python",
+ "TypeScript",
+ "Kotlin",
+ "Scala",
+ "Markdown",
+ "YAML",
+ "R",
+ "Ruby",
+ "Rust",
+ "Shell",
+ "Groovy"
]
- },
- "hasCommercial": {
- "type": "boolean",
- "description": "Indicate if your tool is open source or commercial offering, like SAAS for example",
- "default": false
- }
+ },
+ {
+ "type": "string"
+ }
+ ]
+ }
}
+ ]
+ },
+ "technology": {
+ "type": "array",
+ "description": "Provide a list of different technologies used in the tool. Put details useful for tool user and tool contributor.",
+ "items": {
+ "type": "string",
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "Node js",
+ "Hermes",
+ "React JS",
+ ".NET",
+ "ASP.NET",
+ "Springboot",
+ "AWS",
+ "Docker",
+ "Node-red",
+ "Maven",
+ "Saas",
+ "Kubernetes-native",
+ "Scala",
+ "Azure",
+ "Jenkins",
+ "Flask"
+ ]
+ },
+ {
+ "type": "string"
+ }
+ ]
+ },
+ "examples": ["Express.js", "Kafka"]
+ },
+ "categories": {
+ "type": "array",
+ "description": "Categories are used to group tools by different use case, like documentation or code generation. If have a list of fixed categories. If you use different one that your tool lands under \"other\" category. Feel free to add your category through a pull request to AsyncAPI website repository.",
+ "items": {
+ "type": "string",
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "api",
+ "code-first",
+ "code-generator",
+ "converter",
+ "directory",
+ "documentation-generator",
+ "editor",
+ "ui-component",
+ "dsl",
+ "framework",
+ "github-action",
+ "mocking-and-testing",
+ "validator",
+ "compare-tool",
+ "other",
+ "cli",
+ "bundler",
+ "ide-extension"
+ ]
+ },
+ {
+ "type": "string"
+ }
+ ]
+ },
+ "minItems": 1,
+ "examples": [
+ "api",
+ "code-first",
+ "code-generator",
+ "converter",
+ "directory",
+ "documentation-generator",
+ "editor",
+ "ui-component",
+ "dsl",
+ "framework",
+ "github-action",
+ "mocking-and-testing",
+ "validator",
+ "compare-tool",
+ "other",
+ "cli",
+ "bundler",
+ "ide-extension"
+ ]
+ },
+ "hasCommercial": {
+ "type": "boolean",
+ "description": "Indicate if your tool is open source or commercial offering, like SAAS for example",
+ "default": false
}
+ }
}
-}
\ No newline at end of file
+ }
+}
diff --git a/scripts/utils.js b/scripts/utils.js
deleted file mode 100644
index c740ae91eaef..000000000000
--- a/scripts/utils.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const yaml = require('yaml');
-
-function convertToJson(contentYAMLorJSON) {
- // Axios handles conversion to JSON by default, if data returned from the server allows it
- // So if returned content is not a string (not YAML), we just return JSON back
- if (typeof contentYAMLorJSON !== "string") {
- return contentYAMLorJSON;
- }
-
- // Check if the content is valid JSON before attempting to parse as YAML
- try {
- const jsonContent = JSON.parse(contentYAMLorJSON);
- return jsonContent;
- } catch (jsonError) {
- // If it's not valid JSON, try parsing it as YAML
- try {
- const yamlContent = yaml.parse(contentYAMLorJSON);
- return yamlContent;
- } catch (yamlError) {
- // If parsing as YAML also fails, throw an error
- throw new Error(`Invalid content format:\nJSON Parse Error: ${jsonError}\nYAML Parse Error: ${yamlError}`);
- }
- }
-}
-
-module.exports = { convertToJson };
diff --git a/scripts/utils.ts b/scripts/utils.ts
new file mode 100644
index 000000000000..17044631ec5c
--- /dev/null
+++ b/scripts/utils.ts
@@ -0,0 +1,28 @@
+import yaml from 'yaml';
+
+function convertToJson(contentYAMLorJSON: unknown) {
+  // Axios already parses the response into JSON by default when the server returns JSON,
+  // so if the content is not a string (i.e. not YAML), we just return it unchanged
+ if (typeof contentYAMLorJSON !== 'string') {
+ return contentYAMLorJSON;
+ }
+
+ // Check if the content is valid JSON before attempting to parse as YAML
+ try {
+ const jsonContent = JSON.parse(contentYAMLorJSON);
+
+ return jsonContent;
+ } catch (jsonError) {
+ // If it's not valid JSON, try parsing it as YAML
+ try {
+ const yamlContent = yaml.parse(contentYAMLorJSON);
+
+ return yamlContent;
+ } catch (yamlError) {
+ // If parsing as YAML also fails, throw an error
+ throw new Error(`Invalid content format:\nJSON Parse Error: ${jsonError}\nYAML Parse Error: ${yamlError}`);
+ }
+ }
+}
+
+export { convertToJson };
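+
+// Usage sketch (illustrative):
+// convertToJson('{"title":"Generator"}'); // valid JSON is parsed directly -> { title: 'Generator' }
+// convertToJson('title: Generator');      // falls back to YAML parsing   -> { title: 'Generator' }
+// convertToJson({ already: 'parsed' });   // non-string input is returned unchanged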
diff --git a/scripts/utils/readAndWriteJson.js b/scripts/utils/readAndWriteJson.js
deleted file mode 100644
index 3c7f05d2308b..000000000000
--- a/scripts/utils/readAndWriteJson.js
+++ /dev/null
@@ -1,28 +0,0 @@
-const { promises: { readFile, writeFile } } = require('fs');
-const { convertToJson } = require("../utils");
-
-module.exports = async function writeJSON(readPath, writePath) {
- let readContent;
- let jsonContent;
-
- // Attempt to read the file
- try {
- readContent = await readFile(readPath, 'utf-8');
- } catch (err) {
- throw new Error(`Error while reading file\nError: ${err}`);
- }
-
- // Attempt to convert content to JSON
- try {
- jsonContent = convertToJson(readContent);
- } catch (err) {
- throw new Error(`Error while conversion\nError: ${err}`);
- }
-
- // Attempt to write the JSON content to file
- try {
- await writeFile(writePath, JSON.stringify(jsonContent));
- } catch (err) {
- throw new Error(`Error while writing file\nError: ${err}`);
- }
-};
\ No newline at end of file
diff --git a/scripts/utils/readAndWriteJson.ts b/scripts/utils/readAndWriteJson.ts
new file mode 100644
index 000000000000..6e5e65a17a5e
--- /dev/null
+++ b/scripts/utils/readAndWriteJson.ts
@@ -0,0 +1,29 @@
+import { readFile, writeFile } from 'fs/promises';
+
+import { convertToJson } from '../utils';
+
+export async function writeJSON(readPath: string, writePath: string) {
+ let readContent;
+ let jsonContent;
+
+ // Attempt to read the file
+ try {
+ readContent = await readFile(readPath, 'utf-8');
+ } catch (err) {
+ throw new Error(`Error while reading file\nError: ${err}`);
+ }
+
+ // Attempt to convert content to JSON
+ try {
+ jsonContent = convertToJson(readContent);
+ } catch (err) {
+ throw new Error(`Error while conversion\nError: ${err}`);
+ }
+
+ // Attempt to write the JSON content to file
+ try {
+ await writeFile(writePath, JSON.stringify(jsonContent));
+ } catch (err) {
+ throw new Error(`Error while writing file\nError: ${err}`);
+ }
+}
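+
+// Usage sketch (illustrative; both paths are assumptions, not actual project files):
+// await writeJSON('config/tools-manual.yaml', 'config/tools-manual.json');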
diff --git a/tsconfig.json b/tsconfig.json
index d7c7683d9403..b73954042008 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -16,6 +16,10 @@
"@/*": ["./*"]
}
},
- "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "types/**/*.d.ts", "**/*.json"],
- "exclude": ["node_modules", "netlify"]
+ "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.json"],
+ "exclude": ["node_modules", "netlify"],
+ "ts-node": {
+ "experimentalSpecifierResolution": "node",
+ "transpileOnly": true
+ }
}
diff --git a/types/packages/jgexml__json2xml.d.ts b/types/packages/jgexml__json2xml.d.ts
new file mode 100644
index 000000000000..7c5bd65dce9c
--- /dev/null
+++ b/types/packages/jgexml__json2xml.d.ts
@@ -0,0 +1,9 @@
+declare module 'jgexml/json2xml' {
+ interface Json2Xml {
+ getXml(feed: unknown, attributePrefix: string, defaultValue: string, indentLevel: number): string;
+ }
+
+ const json2xml: Json2Xml;
+
+ export = json2xml;
+}
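+
+// With this declaration in place the module can be imported with types, e.g. (illustrative,
+// assuming esModuleInterop is enabled and `feed` is an RSS object built elsewhere):
+// import json2xml from 'jgexml/json2xml';
+// const xml: string = json2xml.getXml(feed, '@', '', 4);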
diff --git a/types/scripts/build-rss.ts b/types/scripts/build-rss.ts
new file mode 100644
index 000000000000..3780fdbb0453
--- /dev/null
+++ b/types/scripts/build-rss.ts
@@ -0,0 +1,37 @@
+export type BlogPostTypes = 'docs' | 'blog' | 'about' | 'docsTree';
+export type Enclosure = {
+ '@url': string;
+ '@length': number;
+ '@type': string;
+ enclosure: Enclosure;
+};
+
+export type RSSItemType = {
+ title: string;
+ description: string;
+ link: string;
+ category: BlogPostTypes;
+ guid: any;
+ pubDate: string;
+ enclosure: Enclosure;
+};
+export type RSS = {
+ '@version': string;
+ '@xmlns:atom': string;
+ channel: {
+ title: string;
+ link: string;
+ 'atom:link': {
+ '@rel': string;
+ '@href': string;
+ '@type': string;
+ };
+ description: string;
+ language: string;
+ copyright: string;
+ webMaster: string;
+ pubDate: string; // UTC string format
+ generator: string;
+ item: RSSItemType[];
+ };
+};
diff --git a/utils/getStatic.ts b/utils/getStatic.ts
index 56af2cf6b1ca..304654e6f6de 100644
--- a/utils/getStatic.ts
+++ b/utils/getStatic.ts
@@ -1,6 +1,6 @@
import { serverSideTranslations } from 'next-i18next/serverSideTranslations';
-import i18nextConfig from '../next-i18next.config';
+import i18nextConfig from '../next-i18next.config.cjs';
/**
* Retrieves the internationalization paths for the supported locales.
@@ -9,8 +9,8 @@ import i18nextConfig from '../next-i18next.config';
export const getI18nPaths = () =>
i18nextConfig.i18n.locales.map((lng) => ({
params: {
- lang: lng
- }
+ lang: lng,
+ },
}));
/**
@@ -19,7 +19,7 @@ export const getI18nPaths = () =>
*/
export const getStaticPaths = () => ({
fallback: false,
- paths: getI18nPaths()
+ paths: getI18nPaths(),
});
/**
@@ -31,7 +31,7 @@ export const getStaticPaths = () => ({
export async function getI18nProps(ctx: any, ns = ['common']) {
const locale = ctx?.params?.lang;
const props = {
- ...(await serverSideTranslations(locale, ns))
+ ...(await serverSideTranslations(locale, ns)),
};
return props;
@@ -45,7 +45,7 @@ export async function getI18nProps(ctx: any, ns = ['common']) {
export function makeStaticProps(ns = {}) {
return async function getStaticProps(ctx: any) {
return {
- props: await getI18nProps(ctx, ns as any)
+ props: await getI18nProps(ctx, ns as any),
};
};
}
diff --git a/utils/languageDetector.ts b/utils/languageDetector.ts
index e3db95e0f17d..3d4c810dafb2 100644
--- a/utils/languageDetector.ts
+++ b/utils/languageDetector.ts
@@ -1,8 +1,8 @@
import languageDetector from 'next-language-detector';
-import i18nextConfig from '../next-i18next.config';
+import i18nextConfig from '../next-i18next.config.cjs';
export default languageDetector({
supportedLngs: i18nextConfig.i18n.locales,
- fallbackLng: i18nextConfig.i18n.defaultLocale
+ fallbackLng: i18nextConfig.i18n.defaultLocale,
});