-
Notifications
You must be signed in to change notification settings - Fork 14
/
server.js
118 lines (105 loc) · 2.85 KB
/
server.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
const { createServer } = require("http");
const { parse } = require("url");
const next = require("next");
const fs = require("fs");
const path = require("path");
// Project-local helpers that crawl the running site and regenerate the
// search-index JSON file (invoked once the server is listening, below).
const siteCrawler = require("./server-search/site-crawler");
const siteScraper = require("./server-search/site-scraper");
// In-memory search index, loaded synchronously at startup from the last
// generated JSON file and hot-swapped after the scraper finishes.
let searchIndex = JSON.parse(
  fs.readFileSync(
    path.resolve(__dirname, "./server-assets/search-index.json"),
    { encoding: "utf-8" }
  )
);
// Next.js app in production mode; `handle` serves every non-search route.
const app = next({ dev: false });
const handle = app.getRequestHandler();
// NOTE(review): reset to {} after re-indexing but never read or written
// anywhere else in this file — looks unused; confirm before removing.
let searchCache = {};
/**
 * Collect the entries of one index bucket whose key or synonyms contain the
 * search term.
 *
 * Fixes two issues with the previous version:
 * - The term was whitespace-stripped (via trimContent) but keys/synonyms were
 *   only lowercased, so a multi-word term ("foo bar" -> "foobar") could never
 *   match a multi-word key. Keys and synonyms are now normalized identically.
 * - Results were accumulated by re-spreading the array on every match
 *   (accidental O(n^2)); a plain push is used instead.
 *
 * @param {string} term - Raw user-supplied search term.
 * @param {Object<string, {linkResult: string, synonyms?: string}>} bucket -
 *   One tag bucket of the search index (display key -> entry metadata).
 * @returns {Array<{url: string, term: string, value: string}>} Matching hits.
 */
function searchForTerm(term, bucket) {
  const lowerTerm = trimContent(term);
  const hits = [];
  for (const [key, meta] of Object.entries(bucket)) {
    if (
      trimContent(key).includes(lowerTerm) ||
      trimContent(meta?.synonyms ?? "").includes(lowerTerm)
    ) {
      hits.push({
        url: meta.linkResult,
        term,
        value: key,
      });
    }
  }
  return hits;
}
/**
 * Normalize text for matching: locale-aware lower-casing plus removal of all
 * whitespace, interior whitespace included.
 *
 * @param {string} [content=""] - Text to normalize.
 * @returns {string} The lower-cased, whitespace-free string.
 */
function trimContent(content = "") {
  const lowered = content.trim().toLocaleLowerCase();
  return lowered.replace(/\s+/gm, "");
}
/**
 * Run a search term against every tag bucket of the in-memory index and
 * label each bucket's hits by tag name.
 *
 * @param {string} term - Raw user-supplied search term.
 * @returns {Object<string, Array>} Per-tag arrays of matches, keyed h1..li.
 */
function handleSearch(term) {
  // Buckets are consumed in the index's own key order, which is assumed to
  // follow this tag order — TODO confirm against the index generator.
  const tagNames = [
    "h1",
    "h2",
    "h3",
    "h4",
    "h5",
    "h6",
    "strong",
    "em",
    "p",
    "table",
    "li",
  ];
  const perBucketHits = Object.keys(searchIndex).map((key) =>
    searchForTerm(term, searchIndex[key])
  );
  return Object.fromEntries(
    tagNames.map((tag, i) => [tag, perBucketHits[i]])
  );
}
// Boot the Next.js app, then front it with a plain Node HTTP server so the
// /search endpoint can be answered directly from the in-memory index while
// every other route falls through to Next.js.
app.prepare().then(() => {
  createServer((req, res) => {
    const parsedUrl = parse(req.url, true);
    /**
     * If we need to parse additional URL requests we can do it here
     * const { pathname, query } = parsedUrl;
     */
    /**
     * We can serve static assets like service workers here
     * if (pathname === '/a') {
     * app.render(req, res, '/a', query)
     * } else if() {...}
     */
    const { pathname, query } = parsedUrl;
    // Intercept search requests (with and without the /platform-docs base
    // path) and answer them here instead of handing off to Next.js.
    if (pathname === "/search" || pathname === "/platform-docs/search") {
      if (!query?.term) {
        // No term supplied: respond with a sentinel payload, not an error.
        return res.end(JSON.stringify({ noResult: true }));
      }
      try {
        return res.end(JSON.stringify({ data: handleSearch(query?.term) }));
      } catch (error) {
        // NOTE(review): the caught error is discarded — only a flag reaches
        // the client. Consider logging `error` server-side for diagnosis.
        return res.end(JSON.stringify({ searchError: true }));
      }
    }
    // Everything else is served by Next.js.
    handle(req, res, parsedUrl);
  }).listen(3000, async (err) => {
    if (err) {
      throw err;
    }
    // run scraping after server is running
    console.log("> Ready on http://localhost:3000");
    try {
      // Crawl the now-live site, regenerate the index file on disk, then
      // hot-swap the in-memory copy so searches see the fresh data.
      await siteCrawler();
      await siteScraper();
      searchIndex = JSON.parse(
        fs.readFileSync(
          path.resolve(__dirname, "./server-assets/search-index.json"),
          { encoding: "utf-8" }
        )
      );
      searchCache = {};
      console.log("> New search index generated");
    } catch (error) {
      // Best-effort: a failed re-index leaves the startup index in place.
      console.error("> failed to generate search index", error);
    }
  });
});