From 675364013e3b07fc665bfea70e2489b28bad0d8e Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Thu, 4 Jan 2024 17:17:55 +0100 Subject: [PATCH 01/59] feat(storage): upgrade to the Elasticsearch 8 client --- lib/service/storage/elasticsearch.ts | 433 +++++++++++++-------------- package-lock.json | 81 +++-- package.json | 2 +- 3 files changed, 242 insertions(+), 274 deletions(-) diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 2a7b410ac3..d7496f50eb 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -21,21 +21,14 @@ import _ from "lodash"; -import { - ApiResponse, - RequestParams, - Client as StorageClient, -} from "@elastic/elasticsearch"; +import { Client as StorageClient, estypes } from "@elastic/elasticsearch"; import { InfoResult, - KRequestBody, JSONObject, KImportError, + KRequestBody, KRequestParams, } from "../../types/storage/Elasticsearch"; -import { Index, IndicesCreate } from "@elastic/elasticsearch/api/requestParams"; - -import { TypeMapping } from "@elastic/elasticsearch/api/types"; import assert from "assert"; @@ -209,9 +202,7 @@ export default class ElasticSearch extends Service { this._esWrapper = new ESWrapper(this._client); - const { - body: { version }, - } = await this._client.info(); + const { version } = await this._client.info(); if ( version && @@ -252,18 +243,18 @@ export default class ElasticSearch extends Service { return this._client .info() - .then(({ body }) => { + .then((body) => { result.version = body.version.number; result.lucene = body.version.lucene_version; return this._client.cluster.health(); }) - .then(({ body }) => { + .then((body) => { result.status = body.status; return this._client.cluster.stats({ human: true }); }) - .then(({ body }) => { + .then((body) => { result.spaceUsed = body.indices.store.size; result.nodes = body.nodes; @@ -282,7 +273,7 @@ export default class ElasticSearch extends Service { metric: ["docs", "store"], }; - const { body } = await this._client.indices.stats(esRequest); + const body = await this._client.indices.stats(esRequest); const indexes = {}; let size = 0; @@ -342,7 +333,7 @@ export default class ElasticSearch extends Service { */ async scroll(scrollId: string, { scrollTTL }: { scrollTTL?: string } = {}) { const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; - const esRequest: RequestParams.Scroll> = { + const esRequest: estypes.ScrollRequest = { scroll: _scrollTTL, scroll_id: scrollId, }; @@ -377,11 +368,12 @@ export default class ElasticSearch extends Service { const scrollInfo = JSON.parse(stringifiedScrollInfo); try { - const { body } = await this._client.scroll(esRequest); + const body = await this._client.scroll(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); scrollInfo.fetched += body.hits.hits.length; - if (scrollInfo.fetched >= body.hits.total.value) { + if (scrollInfo.fetched >= totalHitsValue) { debug("Last scroll page fetched: deleting scroll %s", body._scroll_id); await global.kuzzle.ask("core:cache:internal:del", cacheKey); await this.clearScroll(body._scroll_id); @@ -396,9 +388,9 @@ export default class ElasticSearch extends Service { ); } - body.remaining = body.hits.total.value - scrollInfo.fetched; + const remaining = totalHitsValue - scrollInfo.fetched; - return await this._formatSearchResult(body, scrollInfo); + return await this._formatSearchResult(body, remaining, scrollInfo); } catch (error) { throw 
this._esWrapper.formatESError(error); } @@ -453,13 +445,13 @@ export default class ElasticSearch extends Service { esIndexes = this._getAlias(index, collection); } - const esRequest = { - body: this._sanitizeSearchBody(searchBody), + const esRequest: estypes.SearchRequest = { + ...this._sanitizeSearchBody(searchBody), from, index: esIndexes, scroll, size, - trackTotalHits: true, + track_total_hits: true, }; if (scroll) { @@ -478,7 +470,9 @@ export default class ElasticSearch extends Service { debug("Search: %j", esRequest); try { - const { body } = await this._client.search(esRequest); + const body = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + let remaining: number; if (body._scroll_id) { const ttl = @@ -497,10 +491,10 @@ export default class ElasticSearch extends Service { { ttl }, ); - body.remaining = body.hits.total.value - body.hits.hits.length; + remaining = totalHitsValue - body.hits.hits.length; } - return await this._formatSearchResult(body, { + return await this._formatSearchResult(body, remaining, { collection, index, targets, @@ -534,7 +528,11 @@ export default class ElasticSearch extends Service { return aliasToTargets; } - async _formatSearchResult(body: any, searchInfo: any = {}) { + async _formatSearchResult( + body: any, + remaining?: number, + searchInfo: any = {}, + ) { let aliasToTargets = {}; const aliasCache = new Map(); @@ -608,7 +606,7 @@ export default class ElasticSearch extends Service { return { aggregations: body.aggregations, hits, - remaining: body.remaining, + remaining, scrollId: body._scroll_id, suggest: body.suggest, total: body.hits.total.value, @@ -640,7 +638,7 @@ export default class ElasticSearch extends Service { debug("Get document: %o", esRequest); try { - const { body } = await this._client.get(esRequest); + const body = await this._client.get(esRequest); return { _id: body._id, @@ -679,10 +677,10 @@ export default class ElasticSearch extends Service { debug("Multi-get documents: %o", esRequest); - let body; + let body: estypes.MgetResponse>; try { - ({ body } = await this._client.mget(esRequest)); // NOSONAR + body = await this._client.mget(esRequest); // NOSONAR } catch (e) { throw this._esWrapper.formatESError(e); } @@ -691,7 +689,7 @@ export default class ElasticSearch extends Service { const items = []; for (const doc of body.docs) { - if (doc.found) { + if (!("error" in doc) && doc.found) { items.push({ _id: doc._id, _source: doc._source, @@ -723,7 +721,7 @@ export default class ElasticSearch extends Service { debug("Count: %o", esRequest); try { - const { body } = await this._client.count(esRequest); + const body = await this._client.count(esRequest); return body.count; } catch (error) { throw this._esWrapper.formatESError(error); @@ -759,8 +757,8 @@ export default class ElasticSearch extends Service { ) { assertIsObject(content); - const esRequest: Index> = { - body: content, + const esRequest: estypes.IndexRequest> = { + document: content, id, index: this._getAlias(index, collection), op_type: id ? 
"create" : "index", @@ -772,7 +770,7 @@ export default class ElasticSearch extends Service { // Add metadata if (injectKuzzleMeta) { - esRequest.body._kuzzle_info = { + esRequest.document._kuzzle_info = { author: getKuid(userId), createdAt: Date.now(), updatedAt: null, @@ -783,11 +781,11 @@ export default class ElasticSearch extends Service { debug("Create document: %o", esRequest); try { - const { body } = await this._client.index(esRequest); + const body = await this._client.index(esRequest); return { _id: body._id, - _source: esRequest.body, + _source: esRequest.document, _version: body._version, }; } catch (error) { @@ -844,7 +842,7 @@ export default class ElasticSearch extends Service { debug("Create or replace document: %o", esRequest); try { - const { body } = await this._client.index(esRequest); + const body = await this._client.index(esRequest); return { _id: body._id, @@ -885,9 +883,12 @@ export default class ElasticSearch extends Service { injectKuzzleMeta?: boolean; } = {}, ) { - const esRequest: RequestParams.Update> = { + const esRequest: estypes.UpdateRequest< + KRequestBody, + KRequestBody + > = { _source: "true", - body: { doc: content }, + doc: content, id, index: this._getAlias(index, collection), refresh, @@ -900,7 +901,8 @@ export default class ElasticSearch extends Service { if (injectKuzzleMeta) { // Add metadata - esRequest.body.doc._kuzzle_info = { + esRequest.doc._kuzzle_info = { + ...esRequest.doc._kuzzle_info, updatedAt: Date.now(), updater: getKuid(userId), }; @@ -909,7 +911,7 @@ export default class ElasticSearch extends Service { debug("Update document: %o", esRequest); try { - const { body } = await this._client.update(esRequest); + const body = await this._client.update(esRequest); return { _id: body._id, _source: body.get._source, @@ -951,17 +953,18 @@ export default class ElasticSearch extends Service { injectKuzzleMeta?: boolean; } = {}, ) { - const esRequest: RequestParams.Update> = { + const esRequest: estypes.UpdateRequest< + KRequestBody, + KRequestBody + > = { _source: "true", - body: { - doc: content, - upsert: { ...defaultValues, ...content }, - }, + doc: content, id, index: this._getAlias(index, collection), refresh, retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, + upsert: { ...defaultValues, ...content }, }; assertNoRouting(esRequest); @@ -972,11 +975,13 @@ export default class ElasticSearch extends Service { const now = Date.now(); if (injectKuzzleMeta) { - esRequest.body.doc._kuzzle_info = { + esRequest.doc._kuzzle_info = { + ...esRequest.doc._kuzzle_info, updatedAt: now, updater: user, }; - esRequest.body.upsert._kuzzle_info = { + esRequest.upsert._kuzzle_info = { + ...esRequest.upsert._kuzzle_info, author: user, createdAt: now, }; @@ -985,7 +990,7 @@ export default class ElasticSearch extends Service { debug("Upsert document: %o", esRequest); try { - const { body } = await this._client.update(esRequest); + const body = await this._client.update(esRequest); return { _id: body._id, @@ -1046,7 +1051,7 @@ export default class ElasticSearch extends Service { } try { - const { body: exists } = await this._client.exists({ id, index: alias }); + const exists = await this._client.exists({ id, index: alias }); if (!exists) { throw kerror.get( @@ -1061,7 +1066,7 @@ export default class ElasticSearch extends Service { debug("Replace document: %o", esRequest); - const { body } = await this._client.index(esRequest); + const body = await this._client.index(esRequest); return { _id: id, @@ -1141,12 +1146,12 @@ export 
default class ElasticSearch extends Service { fetch?: boolean; } = {}, ) { - const esRequest: RequestParams.DeleteByQuery> = { - body: this._sanitizeSearchBody({ query }), + const esRequest = { + ...this._sanitizeSearchBody({ query }), index: this._getAlias(index, collection), scroll: "5s", size, - }; + } satisfies estypes.DeleteByQueryRequest; if (!isPlainObject(query)) { throw kerror.get("services", "storage", "missing_argument", "body.query"); @@ -1163,14 +1168,14 @@ export default class ElasticSearch extends Service { esRequest.refresh = refresh === "wait_for" ? true : refresh; - const { body } = await this._client.deleteByQuery(esRequest); + const body = await this._client.deleteByQuery(esRequest); return { deleted: body.deleted, documents, - failures: body.failures.map(({ shardId, reason }) => ({ - reason, - shardId, + failures: body.failures.map(({ id, cause }) => ({ + cause, + id, })), total: body.total, }; @@ -1211,7 +1216,7 @@ export default class ElasticSearch extends Service { try { debug("DeleteFields document: %o", esRequest); - const { body } = await this._client.get(esRequest); + const body = await this._client.get>(esRequest); for (const field of fields) { if (_.has(body._source, field)) { @@ -1219,11 +1224,13 @@ export default class ElasticSearch extends Service { } } - body._source._kuzzle_info = { - ...body._source._kuzzle_info, - updatedAt: Date.now(), - updater: getKuid(userId), - }; + if (typeof body._source._kuzzle_info === "object") { + body._source._kuzzle_info = { + ...body._source._kuzzle_info, + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } const newEsRequest = { body: body._source, @@ -1235,7 +1242,7 @@ export default class ElasticSearch extends Service { assertNoRouting(newEsRequest); assertWellFormedRefresh(newEsRequest); - const { body: updated } = await this._client.index(newEsRequest); + const updated = await this._client.index(newEsRequest); return { _id: id, @@ -1340,18 +1347,16 @@ export default class ElasticSearch extends Service { script.params[key] = value; } - const esRequest: RequestParams.UpdateByQuery> = { - body: { - query: this._sanitizeSearchBody({ query }).query, - script, - }, + const esRequest: estypes.UpdateByQueryRequest = { index: this._getAlias(index, collection), + query: this._sanitizeSearchBody({ query }).query, refresh, + script, }; debug("Bulk Update by query: %o", esRequest); - let response; + let response: estypes.UpdateByQueryResponse; try { response = await this._client.updateByQuery(esRequest); @@ -1359,23 +1364,23 @@ export default class ElasticSearch extends Service { throw this._esWrapper.formatESError(error); } - if (response.body.failures.length) { - const errors = response.body.failures.map(({ shardId, reason }) => ({ - reason, - shardId, + if (response.failures.length) { + const errors = response.failures.map(({ id, cause }) => ({ + cause, + id, })); throw kerror.get( "services", "storage", "incomplete_update", - response.body.updated, + response.updated, errors, ); } return { - updated: response.body.updated, + updated: response.updated, }; } @@ -1404,8 +1409,8 @@ export default class ElasticSearch extends Service { scrollTTl?: string; } = {}, ): Promise { - const esRequest: RequestParams.Search = { - body: this._sanitizeSearchBody({ query }), + const esRequest: estypes.SearchRequest = { + ...this._sanitizeSearchBody({ query }), from: 0, index: this._getAlias(index, collection), scroll: scrollTTl, @@ -1416,48 +1421,26 @@ export default class ElasticSearch extends Service { throw kerror.get("services", 
"storage", "missing_argument", "body.query"); } - const client = this._client; - let results = []; - + const results = []; let processed = 0; let scrollId = null; try { - results = await new Bluebird((resolve, reject) => { - this._client.search( - esRequest, - async function getMoreUntilDone( - error, - { body: { hits, _scroll_id } }, - ) { - if (error) { - reject(error); - return; - } - - scrollId = _scroll_id; - - const ret = callback(hits.hits); - - results.push(await ret); - processed += hits.hits.length; - - if (hits.total.value !== processed) { - client.scroll( - { - scroll: esRequest.scroll, - scroll_id: _scroll_id, - }, - getMoreUntilDone, - ); - } else { - resolve(results); - } - }, - ); - }); + let body = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + + while (processed < totalHitsValue && body.hits.hits.length > 0) { + scrollId = body._scroll_id; + results.push(await callback(body.hits.hits)); + processed += body.hits.hits.length; + + body = await this._client.scroll({ + scroll: esRequest.scroll, + scroll_id: scrollId, + }); + } } finally { - this.clearScroll(scrollId); + await this.clearScroll(scrollId); } return results; @@ -1478,10 +1461,10 @@ export default class ElasticSearch extends Service { async createIndex(index: string) { this._assertValidIndexAndCollection(index); - let body: ApiResponse>["body"]; + let body: estypes.CatAliasesResponse; try { - body = (await this._client.cat.aliases({ format: "json" })).body; + body = await this._client.cat.aliases({ format: "json" }); } catch (error) { throw this._esWrapper.formatESError(error); } @@ -1527,7 +1510,10 @@ export default class ElasticSearch extends Service { { mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {}, + }: { + mappings?: estypes.MappingTypeMapping; + settings?: Record; + } = {}, ) { this._assertValidIndexAndCollection(index, collection); @@ -1553,15 +1539,13 @@ export default class ElasticSearch extends Service { await mutex.unlock(); } - const esRequest: RequestParams.IndicesCreate> = { - body: { - aliases: { - [this._getAlias(index, collection)]: {}, - }, - mappings: {}, - settings, + const esRequest: estypes.IndicesCreateRequest = { + aliases: { + [this._getAlias(index, collection)]: {}, }, index: await this._getAvailableIndice(index, collection), + mappings: {}, + settings, wait_for_active_shards: await this._getWaitForActiveShards(), }; @@ -1574,7 +1558,7 @@ export default class ElasticSearch extends Service { this._checkMappings(mappings); - esRequest.body.mappings = { + esRequest.mappings = { _meta: mappings._meta || this._config.commonMapping._meta, dynamic: mappings.dynamic || this._config.commonMapping.dynamic, properties: _.merge( @@ -1583,12 +1567,12 @@ export default class ElasticSearch extends Service { ), }; - esRequest.body.settings.number_of_replicas = - esRequest.body.settings.number_of_replicas || + esRequest.settings.number_of_replicas = + esRequest.settings.number_of_replicas || this._config.defaultSettings.number_of_replicas; - esRequest.body.settings.number_of_shards = - esRequest.body.settings.number_of_shards || + esRequest.settings.number_of_shards = + esRequest.settings.number_of_shards || this._config.defaultSettings.number_of_shards; try { @@ -1619,14 +1603,14 @@ export default class ElasticSearch extends Service { */ async getSettings(index: string, collection: string) { const indice = await this._getIndice(index, collection); - const esRequest: RequestParams.IndicesGetSettings = { + const 
esRequest: estypes.IndicesGetSettingsRequest = { index: indice, }; debug("Get settings: %o", esRequest); try { - const { body } = await this._client.indices.getSettings(esRequest); + const body = await this._client.indices.getSettings(esRequest); return body[indice].settings.index; } catch (error) { @@ -1660,7 +1644,7 @@ export default class ElasticSearch extends Service { debug("Get mapping: %o", esRequest); try { - const { body } = await this._client.indices.getMapping(esRequest); + const body = await this._client.indices.getMapping(esRequest); const properties = includeKuzzleMeta ? body[indice].mappings.properties @@ -1691,7 +1675,10 @@ export default class ElasticSearch extends Service { { mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {}, + }: { + mappings?: estypes.MappingTypeMapping; + settings?: Record; + } = {}, ) { const esRequest = { index: await this._getIndice(index, collection), @@ -1764,8 +1751,7 @@ export default class ElasticSearch extends Service { * @returns {Promise.} {} */ async updateSearchIndex(index: string, collection: string) { - const esRequest: RequestParams.UpdateByQuery> = { - body: {}, + const esRequest: estypes.UpdateByQueryRequest = { // @cluster: conflicts when two nodes start at the same time conflicts: "proceed", index: this._getAlias(index, collection), @@ -1797,10 +1783,9 @@ export default class ElasticSearch extends Service { async updateMapping( index: string, collection: string, - mappings: TypeMapping = {}, + mappings: estypes.MappingTypeMapping = {}, ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { - const esRequest: RequestParams.IndicesPutMapping> = { - body: {}, + let esRequest: estypes.IndicesPutMappingRequest = { index: this._getAlias(index, collection), }; @@ -1812,7 +1797,8 @@ export default class ElasticSearch extends Service { this._checkMappings(mappings); - esRequest.body = { + esRequest = { + ...esRequest, _meta: mappings._meta || collectionMappings._meta, dynamic: mappings.dynamic || collectionMappings.dynamic, properties: mappings.properties, @@ -1832,8 +1818,8 @@ export default class ElasticSearch extends Service { ); return { - _meta: esRequest.body._meta, - dynamic: esRequest.body.dynamic, + _meta: esRequest._meta, + dynamic: JSON.stringify(esRequest.dynamic), properties: fullProperties, }; } @@ -1894,13 +1880,11 @@ export default class ElasticSearch extends Service { await this._client.indices.create({ ...esRequest, - body: { - aliases: { - [this._getAlias(index, collection)]: {}, - }, - mappings, - settings, + aliases: { + [this._getAlias(index, collection)]: {}, }, + mappings, + settings, wait_for_active_shards: await this._getWaitForActiveShards(), }); @@ -2038,10 +2022,10 @@ export default class ElasticSearch extends Service { * @returns {Promise.} Collection names */ async listCollections(index, { includeHidden = false } = {}) { - let body; + let body: estypes.CatAliasesResponse; try { - ({ body } = await this._client.cat.aliases({ format: "json" })); + body = await this._client.cat.aliases({ format: "json" }); } catch (error) { throw this._esWrapper.formatESError(error); } @@ -2059,10 +2043,10 @@ export default class ElasticSearch extends Service { * @returns {Promise.} Index names */ async listIndexes() { - let body: ApiResponse["body"]; + let body: estypes.CatAliasesResponse; try { - ({ body } = await this._client.cat.aliases({ format: "json" })); + body = await this._client.cat.aliases({ format: "json" }); } catch (error) { throw 
this._esWrapper.formatESError(error); } @@ -2080,10 +2064,10 @@ export default class ElasticSearch extends Service { * @returns {Object.} Object */ async getSchema() { - let body: ApiResponse["body"]; + let body: estypes.CatAliasesResponse; try { - ({ body } = await this._client.cat.aliases({ format: "json" })); + body = await this._client.cat.aliases({ format: "json" }); } catch (error) { throw this._esWrapper.formatESError(error); } @@ -2107,10 +2091,10 @@ export default class ElasticSearch extends Service { * @returns {Promise.} [ { alias, index, collection, indice } ] */ async listAliases() { - let body; + let body: estypes.CatAliasesResponse; try { - ({ body } = await this._client.cat.aliases({ format: "json" })); + body = await this._client.cat.aliases({ format: "json" }); } catch (error) { throw this._esWrapper.formatESError(error); } @@ -2140,7 +2124,7 @@ export default class ElasticSearch extends Service { */ async deleteCollection(index: string, collection: string): Promise { const indice = await this._getIndice(index, collection); - const esRequest: RequestParams.IndicesDelete = { + const esRequest: estypes.IndicesDeleteRequest = { index: indice, }; @@ -2177,7 +2161,7 @@ export default class ElasticSearch extends Service { const deleted = new Set(); try { - const { body } = await this._client.cat.aliases({ format: "json" }); + const body = await this._client.cat.aliases({ format: "json" }); const esRequest = body.reduce( (request, { alias, index: indice }) => { @@ -2237,21 +2221,19 @@ export default class ElasticSearch extends Service { * @returns {Promise.} { _shards } */ async refreshCollection(index: string, collection: string) { - const esRequest: RequestParams.IndicesRefresh = { + const esRequest: estypes.IndicesRefreshRequest = { index: this._getAlias(index, collection), }; - let _shards: any; + let body: estypes.IndicesRefreshResponse; try { - ({ - body: { _shards }, - } = await this._client.indices.refresh(esRequest)); + body = await this._client.indices.refresh(esRequest); } catch (error) { throw this._esWrapper.formatESError(error); } - return { _shards }; + return body; } /** @@ -2268,15 +2250,13 @@ export default class ElasticSearch extends Service { collection: string, id: string, ): Promise { - const esRequest: RequestParams.Exists = { + const esRequest: estypes.ExistsRequest = { id, index: this._getAlias(index, collection), }; try { - const { body: exists } = await this._client.exists(esRequest); - - return exists; + return await this._client.exists(esRequest); } catch (error) { throw this._esWrapper.formatESError(error); } @@ -2298,20 +2278,18 @@ export default class ElasticSearch extends Service { return { errors: [], item: [] }; } - const esRequest: RequestParams.Mget = { + const esRequest: estypes.MgetRequest = { _source: "false", - body: { - docs: ids.map((_id) => ({ _id })), - }, + docs: ids.map((_id) => ({ _id })), index: this._getAlias(index, collection), }; debug("mExists: %o", esRequest); - let body; + let body: estypes.MgetResponse; try { - ({ body } = await this._client.mget(esRequest)); // NOSONAR + body = await this._client.mget(esRequest); // NOSONAR } catch (e) { throw this._esWrapper.formatESError(e); } @@ -2322,7 +2300,7 @@ export default class ElasticSearch extends Service { for (let i = 0; i < body.docs.length; i++) { const doc = body.docs[i]; - if (doc.found) { + if (!("error" in doc) && doc.found) { items.push(doc._id); } else { errors.push(doc._id); @@ -2413,13 +2391,13 @@ export default class ElasticSearch extends Service { 
this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); // prepare the mget request, but only for document having a specified id - const { body } = + const body = documentsToGet.length > 0 ? await this._client.mget({ - body: { docs: documentsToGet }, + docs: documentsToGet, index: alias, }) - : { body: { docs: [] } }; + : { docs: [] }; const existingDocuments = body.docs; const esRequest = { @@ -2440,7 +2418,9 @@ export default class ElasticSearch extends Service { // Documents are retrieved in the same order than we got them from user if (typeof document._id === "string" && existingDocuments[idx]) { - if (existingDocuments[idx].found) { + const doc = existingDocuments[idx]; + + if (!("error" in doc) && doc.found) { document._source._kuzzle_info = undefined; rejected.push({ @@ -2797,8 +2777,8 @@ export default class ElasticSearch extends Service { return { errors: rejected, items: [] }; } - const { body } = await this._client.mget({ - body: { docs: documentsToGet }, + const body = await this._client.mget({ + docs: documentsToGet, index: alias, }); @@ -2819,7 +2799,9 @@ export default class ElasticSearch extends Service { const document = extractedDocuments[i]; // Documents are retrieved in the same order than we got them from user - if (existingDocuments[i]?.found) { + const doc = existingDocuments[i]; + + if (!("error" in doc) && doc?.found) { esRequest.body.push({ index: { _id: document._id, @@ -2941,7 +2923,7 @@ export default class ElasticSearch extends Service { * @returns {Promise.} results */ async _mExecute( - esRequest: RequestParams.Bulk, + esRequest: estypes.BulkRequest, documents: JSONObject[], partialErrors: JSONObject[] = [], { limits = true, source = true } = {}, @@ -2952,17 +2934,16 @@ export default class ElasticSearch extends Service { return kerror.reject("services", "storage", "write_limit_exceeded"); } - let response = { body: { items: [] } }; + let body = { items: [] }; if (documents.length > 0) { try { - response = await this._client.bulk(esRequest); + body = await this._client.bulk(esRequest); } catch (error) { throw this._esWrapper.formatESError(error); } } - const body = response.body; const successes = []; /** @@ -3188,10 +3169,9 @@ export default class ElasticSearch extends Service { * Given an alias name, returns the associated index name. 
*/ async _checkIfAliasExists(aliasName) { - const { body } = await this._client.indices.existsAlias({ + return this._client.indices.existsAlias({ name: aliasName, }); - return body; } /** @@ -3206,7 +3186,7 @@ export default class ElasticSearch extends Service { */ async _getIndice(index: string, collection: string): Promise { const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; - const { body } = await this._client.cat.aliases({ + const body = await this._client.cat.aliases({ format: "json", name: alias, }); @@ -3234,12 +3214,12 @@ export default class ElasticSearch extends Service { * @private */ async _getSettings( - esRequest: RequestParams.IndicesGetSettings, + esRequest: estypes.IndicesGetSettingsRequest, ): Promise { const response = await this._client.indices.getSettings(esRequest); const index = esRequest.index as string; - return response.body[index].settings; + return response[index].settings; } /** @@ -3259,7 +3239,7 @@ export default class ElasticSearch extends Service { INDEX_PREFIX_POSITION_IN_ALIAS, ); - if (!(await this._client.indices.exists({ index: indice })).body) { + if (!(await this._client.indices.exists({ index: indice }))) { return indice; } @@ -3276,11 +3256,9 @@ export default class ElasticSearch extends Service { .toString(); } - const response = await this._client.indices.exists({ + notAvailable = await this._client.indices.exists({ index: indice + suffix, }); - - notAvailable = response.body; } while (notAvailable); return indice + suffix; @@ -3316,7 +3294,7 @@ export default class ElasticSearch extends Service { */ async generateMissingAliases() { try { - const { body } = await this._client.cat.indices({ format: "json" }); + const body = await this._client.cat.indices({ format: "json" }); const indices = body.map(({ index: indice }) => indice); const aliases = await this.listAliases(); @@ -3438,17 +3416,15 @@ export default class ElasticSearch extends Service { return; } - const esRequest: IndicesCreate> = { - body: { - aliases: { - [this._getAlias(index, HIDDEN_COLLECTION)]: {}, - }, - settings: { - number_of_replicas: this._config.defaultSettings.number_of_replicas, - number_of_shards: this._config.defaultSettings.number_of_shards, - }, + const esRequest: estypes.IndicesCreateRequest = { + aliases: { + [this._getAlias(index, HIDDEN_COLLECTION)]: {}, }, index: await this._getAvailableIndice(index, HIDDEN_COLLECTION), + settings: { + number_of_replicas: this._config.defaultSettings.number_of_replicas, + number_of_shards: this._config.defaultSettings.number_of_shards, + }, wait_for_active_shards: await this._getWaitForActiveShards(), }; @@ -3468,8 +3444,8 @@ export default class ElasticSearch extends Service { * To find the best value for this setting, we need to take into account * the number of nodes in the cluster and the number of shards per index. 
*/ - async _getWaitForActiveShards(): Promise { - const { body } = await this._client.cat.nodes({ format: "json" }); + async _getWaitForActiveShards(): Promise { + const body = await this._client.cat.nodes({ format: "json" }); const numberOfNodes = body.length; @@ -3477,7 +3453,7 @@ export default class ElasticSearch extends Service { return "all"; } - return "1"; + return 1; } /** @@ -3489,26 +3465,22 @@ export default class ElasticSearch extends Service { * * @returns {Promise.} resolve to an array of documents */ - async _getAllDocumentsFromQuery( - esRequest: RequestParams.Search>, - ) { - let { - body: { hits, _scroll_id }, - } = await this._client.search(esRequest); + async _getAllDocumentsFromQuery(esRequest: estypes.SearchRequest) { + let { hits, _scroll_id } = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(hits); - if (hits.total.value > global.kuzzle.config.limits.documentsWriteCount) { + if (totalHitsValue > global.kuzzle.config.limits.documentsWriteCount) { throw kerror.get("services", "storage", "write_limit_exceeded"); } let documents = hits.hits.map((h: JSONObject) => ({ _id: h._id, _source: h._source, + body: {}, })); - while (hits.total.value !== documents.length) { - ({ - body: { hits, _scroll_id }, - } = await this._client.scroll({ + while (totalHitsValue !== documents.length) { + ({ hits, _scroll_id } = await this._client.scroll({ scroll: esRequest.scroll, scroll_id: _scroll_id, })); @@ -3517,6 +3489,7 @@ export default class ElasticSearch extends Service { hits.hits.map((h: JSONObject) => ({ _id: h._id, _source: h._source, + body: {}, })), ); } @@ -3678,12 +3651,12 @@ export default class ElasticSearch extends Service { wait_for_no_initializing_shards: true, }); - if (health.body.number_of_pending_tasks === 0) { + if (health.number_of_pending_tasks === 0) { global.kuzzle.log.info("[✔] Elasticsearch is ready"); esState = esStateEnum.OK; } else { global.kuzzle.log.info( - `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining`, + `[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining`, ); await Bluebird.delay(1000); } @@ -3767,6 +3740,14 @@ export default class ElasticSearch extends Service { } } } + + _getHitsTotalValue(hits: estypes.SearchHitsMetadata): number { + if (typeof hits.total === "number") { + return hits.total; + } + + return hits.total.value; + } } /** @@ -3801,7 +3782,7 @@ function findDynamic(mappings, path = [], results = {}) { * @throws */ function assertNoRouting(esRequest) { - if (esRequest.body._routing) { + if (esRequest._routing) { throw kerror.get("services", "storage", "no_routing"); } } diff --git a/package-lock.json b/package-lock.json index 92f1c64fa5..9586947868 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "2.27.3", "license": "Apache-2.0", "dependencies": { - "@elastic/elasticsearch": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", + "@elastic/elasticsearch": "8.11.0", "aedes": "0.46.3", "bluebird": "3.7.2", "cli-color": "2.0.3", @@ -837,24 +837,21 @@ } }, "node_modules/@elastic/elasticsearch": { - "version": "7.13.0", - "resolved": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", - "integrity": "sha512-yXK4IlNYWdVkwIEBi+3m8/OoZKlIYS8F5AauYD0E4EBNAXDoQcWmuVaXdfBlT3nXSMY+m8dEptNg7BTbA9vRlg==", - "license": "Apache-2.0", + "version": "8.11.0", + "resolved": 
"https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.11.0.tgz", + "integrity": "sha512-1UEQFdGLuKdROLJnMTjegasRM3X9INm/PVADoIVgdTfuv6DeJ17UMuNwYSkCrLrC0trLjjGV4YganpbJJX/VLg==", "dependencies": { - "debug": "^4.3.1", - "hpagent": "^0.1.1", - "ms": "^2.1.3", - "secure-json-parse": "^2.4.0" + "@elastic/transport": "^8.4.0", + "tslib": "^2.4.0" }, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@elastic/transport": { - "version": "8.3.4", - "resolved": "https://registry.npmjs.org/@elastic/transport/-/transport-8.3.4.tgz", - "integrity": "sha512-+0o8o74sbzu3BO7oOZiP9ycjzzdOt4QwmMEjFc1zfO7M0Fh7QX1xrpKqZbSd8vBwihXNlSq/EnMPfgD2uFEmFg==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@elastic/transport/-/transport-8.4.0.tgz", + "integrity": "sha512-Yb3fDa7yGD0ca3uMbL64M3vM1cE5h5uHmBcTjkdB4VpCasRNKSd09iDpwqX8zX1tbBtxcaKYLceKthWvPeIxTw==", "dependencies": { "debug": "^4.3.4", "hpagent": "^1.0.0", @@ -864,15 +861,7 @@ "undici": "^5.22.1" }, "engines": { - "node": ">=14" - } - }, - "node_modules/@elastic/transport/node_modules/hpagent": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-1.2.0.tgz", - "integrity": "sha512-A91dYTeIB6NoXG+PxTQpCCDDnfHsW9kc06Lvpu1TEe9gnd6ZFeiBoRO9JvzEv6xK7EX97/dUE8g/vBMTqTS3CA==", - "engines": { - "node": ">=14" + "node": ">=16" } }, "node_modules/@eslint-community/eslint-utils": { @@ -932,9 +921,9 @@ } }, "node_modules/@fastify/busboy": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.0.0.tgz", - "integrity": "sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.0.tgz", + "integrity": "sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==", "engines": { "node": ">=14" } @@ -2412,7 +2401,8 @@ "node_modules/archy": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", - "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==" + "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==", + "dev": true }, "node_modules/are-we-there-yet": { "version": "1.1.7", @@ -3407,6 +3397,7 @@ "version": "3.9.0", "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, "funding": [ { "type": "github", @@ -3472,6 +3463,7 @@ "version": "0.5.1", "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.5.1.tgz", "integrity": "sha512-7Qg2Jrep1S/+Q3EceiZtQcDPWxhAvBw+ERf1162v4sikJrvojMHFqXt8QIVha8UlH9rgU0BeWPytZ9/TzYqlUw==", + "dev": true, "dependencies": { "object-assign": "^4.1.0", "string-width": "^2.1.1" @@ -3682,7 +3674,7 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", - "devOptional": true, + "dev": true, "engines": { "node": ">=0.1.90" } @@ -6047,9 +6039,12 @@ } }, "node_modules/hpagent": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-0.1.2.tgz", - "integrity": "sha512-ePqFXHtSQWAFXYmj+JtOTHr84iNrII4/QRlAAPPE+zqnKy4xJo7Ie1Y4kC7AdB+LxLxSTTzBMASsEcy0q8YyvQ==" + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/hpagent/-/hpagent-1.2.0.tgz", + "integrity": "sha512-A91dYTeIB6NoXG+PxTQpCCDDnfHsW9kc06Lvpu1TEe9gnd6ZFeiBoRO9JvzEv6xK7EX97/dUE8g/vBMTqTS3CA==", + "engines": { + "node": ">=14" + } }, "node_modules/html-escaper": { "version": "2.0.2", @@ -7631,7 +7626,8 @@ "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true }, "node_modules/json-schema": { "version": "0.4.0", @@ -12612,6 +12608,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, "dependencies": { "aggregate-error": "^3.0.0" }, @@ -15277,7 +15274,8 @@ "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true }, "node_modules/thenify": { "version": "3.3.1", @@ -15710,9 +15708,9 @@ } }, "node_modules/undici": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.26.5.tgz", - "integrity": "sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==", + "version": "5.28.2", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz", + "integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -16069,18 +16067,6 @@ "elastic-apm-node": "^3.20.0" } }, - "node_modules/winston-elasticsearch/node_modules/@elastic/elasticsearch": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.10.0.tgz", - "integrity": "sha512-RIEyqz0D18bz/dK+wJltaak+7wKaxDELxuiwOJhuMrvbrBsYDFnEoTdP/TZ0YszHBgnRPGqBDBgH/FHNgHObiQ==", - "dependencies": { - "@elastic/transport": "^8.3.4", - "tslib": "^2.4.0" - }, - "engines": { - "node": ">=14" - } - }, "node_modules/winston-elasticsearch/node_modules/retry": { "version": "0.13.1", "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", @@ -16227,6 +16213,7 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, "dependencies": { "imurmurhash": "^0.1.4", "signal-exit": "^3.0.7" diff --git a/package.json b/package.json index e97734340a..bcba98a127 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ "lib": "lib" }, "dependencies": { - "@elastic/elasticsearch": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", + "@elastic/elasticsearch": "8.11.0", "aedes": "0.46.3", "bluebird": "3.7.2", "cli-color": "2.0.3", From 50bfe580db9e86d6e1de761605ac86debdae2e48 Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Thu, 4 Jan 2024 17:47:52 +0100 Subject: [PATCH 02/59] fix(storage): update 
more types --- docker/scripts/start-kuzzle-test.ts | 6 +++--- lib/types/storage/Elasticsearch.ts | 6 +++--- package.json | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docker/scripts/start-kuzzle-test.ts b/docker/scripts/start-kuzzle-test.ts index 8a308c0597..12cf6e6662 100644 --- a/docker/scripts/start-kuzzle-test.ts +++ b/docker/scripts/start-kuzzle-test.ts @@ -423,11 +423,11 @@ app.controller.register("tests", { const response = await client.index(esRequest); const response2 = await app.storage.storageClient.index(esRequest); - should(omit(response.body, ["_version", "result", "_seq_no"])).match( - omit(response2.body, ["_version", "result", "_seq_no"]) + should(omit(response, ["_version", "result", "_seq_no"])).match( + omit(response2, ["_version", "result", "_seq_no"]) ); - return response.body; + return response; }, http: [{ verb: "post", path: "/tests/storage-client/:index" }], }, diff --git a/lib/types/storage/Elasticsearch.ts b/lib/types/storage/Elasticsearch.ts index 9a1f2adf3d..c58c38d0fe 100644 --- a/lib/types/storage/Elasticsearch.ts +++ b/lib/types/storage/Elasticsearch.ts @@ -1,12 +1,12 @@ -import { ByteSize, ClusterNodesStats } from "@elastic/elasticsearch/api/types"; +import { estypes } from "@elastic/elasticsearch"; export type InfoResult = { type: string; version: string; status?: string; lucene?: string; - spaceUsed?: ByteSize; - nodes?: ClusterNodesStats; + spaceUsed?: estypes.ByteSize; + nodes?: estypes.ClusterStatsClusterNodes; }; export type KRequestBody = T & { diff --git a/package.json b/package.json index bcba98a127..2c6db673d5 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,7 @@ "test:lint:ts": "eslint ./lib --ext .ts --config .eslintc-ts.json", "test:lint": "npm run test:lint:js && npm run test:lint:ts", "test:unit": "DEBUG= npx --node-arg=--trace-warnings mocha --exit", - "test": "npm run clean && npm run --silent test:lint && npm run build && npm run test:unit:coverage && npm run test:functional" + "test": "npm run clean && npm run --silent test:lint && npm run build && npm run test:unit && npm run test:functional" }, "directories": { "lib": "lib" From 9ce4aa49bcbd8b5668d8e8d0d9cd1ce1b48d1a58 Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Fri, 5 Jan 2024 12:10:49 +0100 Subject: [PATCH 03/59] test: fix StorageClient maxRetries --- lib/service/storage/elasticsearch.ts | 20 +++----------------- test/core/backend/BackendStorage.test.js | 2 +- 2 files changed, 4 insertions(+), 18 deletions(-) diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index d7496f50eb..4b91b206e4 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -21,7 +21,7 @@ import _ from "lodash"; -import { Client as StorageClient, estypes } from "@elastic/elasticsearch"; +import { Client as StorageClient, ClientOptions, estypes } from '@elastic/elasticsearch'; import { InfoResult, JSONObject, @@ -106,22 +106,8 @@ export default class ElasticSearch extends Service { * * @returns {Object} */ - static buildClient(config) { - // Passed to Elasticsearch's client to make it use - // Bluebird instead of ES6 promises - const defer = function defer() { - let resolve; - let reject; - - const promise = new Bluebird((res, rej) => { - resolve = res; - reject = rej; - }); - - return { promise, reject, resolve }; - }; - - return new StorageClient({ defer, ...config }); + static buildClient(config: ClientOptions) { + return new StorageClient(config); } 
constructor(config, scope = scopeEnum.PUBLIC) { diff --git a/test/core/backend/BackendStorage.test.js b/test/core/backend/BackendStorage.test.js index 92aac0b6f9..bbe3872ca6 100644 --- a/test/core/backend/BackendStorage.test.js +++ b/test/core/backend/BackendStorage.test.js @@ -34,7 +34,7 @@ describe("Backend", () => { should(client.connectionPool.connections[0].url.toString()).be.eql( "http://es:9200/", ); - should(client.helpers.maxRetries).be.eql(42); + should(client.helpers[Object.getOwnPropertySymbols(client.helpers).find((s) => s.description === 'max retries')]).be.eql(42); }); }); From 7c8d5b6dc98a0ca1ded9b8084246445a48f184e3 Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Fri, 5 Jan 2024 12:26:31 +0100 Subject: [PATCH 04/59] test: fix ES mock client --- test/mocks/service/elasticsearchClient.mock.js | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/test/mocks/service/elasticsearchClient.mock.js b/test/mocks/service/elasticsearchClient.mock.js index ff7591564f..23dc5d13e8 100644 --- a/test/mocks/service/elasticsearchClient.mock.js +++ b/test/mocks/service/elasticsearchClient.mock.js @@ -17,10 +17,8 @@ class ElasticsearchClientMock { this.get = sinon.stub().resolves(); this.index = sinon.stub().resolves(); this.info = sinon.stub().resolves({ - body: { - version: { - number: "7.0.0", - }, + version: { + number: "8.0.0", }, }); this.mget = sinon.stub().resolves(); @@ -37,9 +35,7 @@ class ElasticsearchClientMock { this.cluster = { health: sinon.stub().resolves({ - body: { - number_of_pending_tasks: 0, - }, + number_of_pending_tasks: 0, }), stats: sinon.stub().resolves(), }; From 4a8038e7729a9cdf5b9d7b2c0540899f8911d11c Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Fri, 5 Jan 2024 12:27:18 +0100 Subject: [PATCH 05/59] feat: only support ES 8.x --- lib/service/storage/elasticsearch.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 4b91b206e4..9381559835 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -192,7 +192,7 @@ export default class ElasticSearch extends Service { if ( version && - !semver.satisfies(semver.coerce(version.number), ">= 7.0.0") + !semver.satisfies(semver.coerce(version.number), "^8.0.0") ) { throw kerror.get( "services", From dfb83b7fdd8e0ed6554d1b6980124d94841cd187 Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Fri, 5 Jan 2024 12:28:31 +0100 Subject: [PATCH 06/59] chore: update gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index bfdc0cb01b..dfba21a9ad 100644 --- a/.gitignore +++ b/.gitignore @@ -162,6 +162,7 @@ lib/model/security/profile.js lib/model/security/role.js lib/model/security/token.js lib/model/security/user.js +lib/service/storage/elasticsearch.js lib/types/ClientConnection.js lib/types/config/DumpConfiguration.js lib/types/config/HttpConfiguration.js @@ -206,6 +207,7 @@ lib/types/realtime/RoomList.js lib/types/RequestPayload.js lib/types/ResponsePayload.js lib/types/RoleDefinition.js +lib/types/storage/Elasticsearch.js lib/types/StrategyDefinition.js lib/types/Target.js lib/types/Token.js From fcb6ad80bee54c8d2f112979d4983a90087e741c Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Fri, 5 Jan 2024 12:38:46 +0100 Subject: [PATCH 07/59] test: fix unit tests --- 
lib/core/storage/clientAdapter.js | 2 +- lib/service/storage/elasticsearch.ts | 110 +- test/core/backend/BackendStorage.test.js | 8 +- test/service/storage/elasticsearch.test.js | 1067 +++++++++----------- 4 files changed, 534 insertions(+), 653 deletions(-) diff --git a/lib/core/storage/clientAdapter.js b/lib/core/storage/clientAdapter.js index 10e7e7bc4a..4374cd94d9 100644 --- a/lib/core/storage/clientAdapter.js +++ b/lib/core/storage/clientAdapter.js @@ -454,7 +454,7 @@ class ClientAdapter { * @param {string} collection * @param {Object} query * @param {Object} [opts] -- see Elasticsearch "deleteByQuery" options - * @returns {Promise.<{ documents, total, deleted, failures: [ _shardId, reason ] }>} + * @returns {Promise.<{ documents, total, deleted, failures: [ id, reason ] }>} */ global.kuzzle.onAsk( `core:storage:${this.scope}:document:deleteByQuery`, diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 9381559835..9c9188282f 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -21,7 +21,11 @@ import _ from "lodash"; -import { Client as StorageClient, ClientOptions, estypes } from '@elastic/elasticsearch'; +import { + Client as StorageClient, + ClientOptions, + estypes, +} from "@elastic/elasticsearch"; import { InfoResult, JSONObject, @@ -190,10 +194,7 @@ export default class ElasticSearch extends Service { const { version } = await this._client.info(); - if ( - version && - !semver.satisfies(semver.coerce(version.number), "^8.0.0") - ) { + if (version && !semver.satisfies(semver.coerce(version.number), "^8.0.0")) { throw kerror.get( "services", "storage", @@ -805,8 +806,8 @@ export default class ElasticSearch extends Service { injectKuzzleMeta?: boolean; } = {}, ) { - const esRequest = { - body: content, + const esRequest: estypes.IndexRequest> = { + document: content, id, index: this._getAlias(index, collection), refresh, @@ -817,7 +818,7 @@ export default class ElasticSearch extends Service { // Add metadata if (injectKuzzleMeta) { - esRequest.body._kuzzle_info = { + esRequest.document._kuzzle_info = { author: getKuid(userId), createdAt: Date.now(), updatedAt: Date.now(), @@ -832,7 +833,7 @@ export default class ElasticSearch extends Service { return { _id: body._id, - _source: esRequest.body, + _source: esRequest.document, _version: body._version, created: body.result === "created", // Needed by the notifier }; @@ -1016,8 +1017,8 @@ export default class ElasticSearch extends Service { } = {}, ) { const alias = this._getAlias(index, collection); - const esRequest = { - body: content, + const esRequest: estypes.IndexRequest> = { + document: content, id, index: alias, refresh, @@ -1028,7 +1029,7 @@ export default class ElasticSearch extends Service { if (injectKuzzleMeta) { // Add metadata - esRequest.body._kuzzle_info = { + esRequest.document._kuzzle_info = { author: getKuid(userId), createdAt: Date.now(), updatedAt: Date.now(), @@ -1056,7 +1057,7 @@ export default class ElasticSearch extends Service { return { _id: id, - _source: esRequest.body, + _source: esRequest.document, _version: body._version, }; } catch (error) { @@ -1116,7 +1117,7 @@ export default class ElasticSearch extends Service { * @param {Object} query - Query to match documents * @param {Object} options - size (undefined), refresh (undefined), fetch (true) * - * @returns {Promise.<{ documents, total, deleted, failures: Array<{ _shardId, reason }> }>} + * @returns {Promise.<{ documents, total, deleted, failures: Array<{ id, reason 
}> }>} */ async deleteByQuery( index: string, @@ -1136,8 +1137,7 @@ export default class ElasticSearch extends Service { ...this._sanitizeSearchBody({ query }), index: this._getAlias(index, collection), scroll: "5s", - size, - } satisfies estypes.DeleteByQueryRequest; + } satisfies estypes.DeleteByQueryRequest | estypes.SearchRequest; if (!isPlainObject(query)) { throw kerror.get("services", "storage", "missing_argument", "body.query"); @@ -1147,21 +1147,27 @@ export default class ElasticSearch extends Service { let documents = []; if (fetch) { - documents = await this._getAllDocumentsFromQuery(esRequest); + documents = await this._getAllDocumentsFromQuery({ + ...esRequest, + size, + }); } debug("Delete by query: %o", esRequest); esRequest.refresh = refresh === "wait_for" ? true : refresh; - const body = await this._client.deleteByQuery(esRequest); + const body = await this._client.deleteByQuery({ + ...esRequest, + max_docs: size, + }); return { deleted: body.deleted, documents, failures: body.failures.map(({ id, cause }) => ({ - cause, id, + reason: cause.reason, })), total: body.total, }; @@ -1195,14 +1201,14 @@ export default class ElasticSearch extends Service { } = {}, ) { const alias = this._getAlias(index, collection); - const esRequest = { + const esRequest: estypes.GetRequest = { id, index: alias, }; try { debug("DeleteFields document: %o", esRequest); - const body = await this._client.get>(esRequest); + const body = await this._client.get(esRequest); for (const field of fields) { if (_.has(body._source, field)) { @@ -1210,16 +1216,22 @@ export default class ElasticSearch extends Service { } } + const updatedInfos = { + updatedAt: Date.now(), + updater: getKuid(userId), + }; + if (typeof body._source._kuzzle_info === "object") { body._source._kuzzle_info = { ...body._source._kuzzle_info, - updatedAt: Date.now(), - updater: getKuid(userId), + ...updatedInfos, }; + } else { + body._source._kuzzle_info = updatedInfos; } - const newEsRequest = { - body: body._source, + const newEsRequest: estypes.IndexRequest = { + document: body._source, id, index: alias, refresh, @@ -1623,7 +1635,7 @@ export default class ElasticSearch extends Service { } = {}, ) { const indice = await this._getIndice(index, collection); - const esRequest = { + const esRequest: estypes.IndicesGetMappingRequest = { index: indice, }; @@ -1805,7 +1817,7 @@ export default class ElasticSearch extends Service { return { _meta: esRequest._meta, - dynamic: JSON.stringify(esRequest.dynamic), + dynamic: esRequest.dynamic.toString(), properties: fullProperties, }; } @@ -2386,9 +2398,9 @@ export default class ElasticSearch extends Service { : { docs: [] }; const existingDocuments = body.docs; - const esRequest = { - body: [], + const esRequest: estypes.BulkRequest = { index: alias, + operations: [], refresh, timeout, }; @@ -2418,20 +2430,20 @@ export default class ElasticSearch extends Service { status: 400, }); } else { - esRequest.body.push({ + esRequest.operations.push({ index: { _id: document._id, _index: alias, }, }); - esRequest.body.push(document._source); + esRequest.operations.push(document._source); toImport.push(document); } idx++; } else { - esRequest.body.push({ index: { _index: alias } }); - esRequest.body.push(document._source); + esRequest.operations.push({ index: { _index: alias } }); + esRequest.operations.push(document._source); toImport.push(document); } @@ -2478,9 +2490,9 @@ export default class ElasticSearch extends Service { } const alias = this._getAlias(index, collection); - const esRequest = { - body: 
[], + const esRequest: estypes.BulkRequest = { index: alias, + operations: [], refresh, timeout, }; @@ -2489,7 +2501,7 @@ export default class ElasticSearch extends Service { kuzzleMeta, ); - esRequest.body = []; + esRequest.operations = []; /** * @warning Critical code section @@ -2497,13 +2509,13 @@ export default class ElasticSearch extends Service { * request can contain more than 10K elements */ for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.body.push({ + esRequest.operations.push({ index: { _id: extractedDocuments[i]._id, _index: alias, }, }); - esRequest.body.push(extractedDocuments[i]._source); + esRequest.operations.push(extractedDocuments[i]._source); } /* end critical code section */ @@ -2538,9 +2550,9 @@ export default class ElasticSearch extends Service { ) { const alias = this._getAlias(index, collection), toImport = [], - esRequest = { - body: [], + esRequest: estypes.BulkRequest = { index: alias, + operations: [], refresh, timeout, }, @@ -2564,7 +2576,7 @@ export default class ElasticSearch extends Service { const extractedDocument = extractedDocuments[i]; if (typeof extractedDocument._id === "string") { - esRequest.body.push({ + esRequest.operations.push({ update: { _id: extractedDocument._id, _index: alias, @@ -2575,7 +2587,7 @@ export default class ElasticSearch extends Service { // _source: true => makes ES return the updated document source in the // response. Required by the real-time notifier component - esRequest.body.push({ + esRequest.operations.push({ _source: true, doc: extractedDocument._source, }); @@ -2638,8 +2650,8 @@ export default class ElasticSearch extends Service { } = {}, ) { const alias = this._getAlias(index, collection); - const esRequest = { - body: [], + const esRequest: estypes.BulkRequest = { + operations: [], refresh, timeout, }; @@ -2676,7 +2688,7 @@ export default class ElasticSearch extends Service { * request can contain more than 10K elements */ for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.body.push( + esRequest.operations.push( { update: { _id: extractedDocuments[i]._id, @@ -2769,8 +2781,8 @@ export default class ElasticSearch extends Service { }); const existingDocuments = body.docs; - const esRequest = { - body: [], + const esRequest: estypes.BulkRequest = { + operations: [], refresh, timeout, }; @@ -2788,13 +2800,13 @@ export default class ElasticSearch extends Service { const doc = existingDocuments[i]; if (!("error" in doc) && doc?.found) { - esRequest.body.push({ + esRequest.operations.push({ index: { _id: document._id, _index: alias, }, }); - esRequest.body.push(document._source); + esRequest.operations.push(document._source); toImport.push(document); } else { diff --git a/test/core/backend/BackendStorage.test.js b/test/core/backend/BackendStorage.test.js index bbe3872ca6..0978a5deec 100644 --- a/test/core/backend/BackendStorage.test.js +++ b/test/core/backend/BackendStorage.test.js @@ -34,7 +34,13 @@ describe("Backend", () => { should(client.connectionPool.connections[0].url.toString()).be.eql( "http://es:9200/", ); - should(client.helpers[Object.getOwnPropertySymbols(client.helpers).find((s) => s.description === 'max retries')]).be.eql(42); + should( + client.helpers[ + Object.getOwnPropertySymbols(client.helpers).find( + (s) => s.description === "max retries", + ) + ], + ).be.eql(42); }); }); diff --git a/test/service/storage/elasticsearch.test.js b/test/service/storage/elasticsearch.test.js index aa9603e892..1db27690b5 100644 --- a/test/service/storage/elasticsearch.test.js +++ 
b/test/service/storage/elasticsearch.test.js @@ -105,23 +105,21 @@ describe("Test: ElasticSearch service", () => { describe("#stats", () => { beforeEach(() => { elasticsearch._client.indices.stats.resolves({ - body: { - indices: { - "%kuzzle.users": { - total: { docs: { count: 1 }, store: { size_in_bytes: 10 } }, - }, - "&test-index._kuzzle_keep": { - total: { docs: { count: 0 }, store: { size_in_bytes: 10 } }, - }, - "&test-index.test-collection": { - total: { docs: { count: 2 }, store: { size_in_bytes: 20 } }, - }, - ".kibana": { - total: { docs: { count: 2 }, store: { size_in_bytes: 42 } }, - }, - ".geoip_databases": { - /* This index nativement do not return anything on index:stats call */ - }, + indices: { + "%kuzzle.users": { + total: { docs: { count: 1 }, store: { size_in_bytes: 10 } }, + }, + "&test-index._kuzzle_keep": { + total: { docs: { count: 0 }, store: { size_in_bytes: 10 } }, + }, + "&test-index.test-collection": { + total: { docs: { count: 2 }, store: { size_in_bytes: 20 } }, + }, + ".kibana": { + total: { docs: { count: 2 }, store: { size_in_bytes: 42 } }, + }, + ".geoip_databases": { + /* This index nativement do not return anything on index:stats call */ }, }, }); @@ -187,15 +185,13 @@ describe("Test: ElasticSearch service", () => { ); elasticsearch._client.scroll.resolves({ - body: { - _scroll_id: "azerty", - hits: { - hits: [ - { _index: "&foo.foo", _id: "foo", _source: {} }, - { _index: "&bar.bar", _id: "bar", _source: {} }, - ], - total: { value: 1000 }, - }, + _scroll_id: "azerty", + hits: { + hits: [ + { _index: "&foo.foo", _id: "foo", _source: {} }, + { _index: "&bar.bar", _id: "bar", _source: {} }, + ], + total: { value: 1000 }, }, }); @@ -285,16 +281,14 @@ describe("Test: ElasticSearch service", () => { ); elasticsearch._client.scroll.resolves({ - body: { - hits: { - hits: [ - { _index: "&foo.foo", _id: "foo", _source: {} }, - { _index: "&bar.bar", _id: "bar", _source: {} }, - ], - total: { value: 1000 }, - }, - _scroll_id: "azerty", + hits: { + hits: [ + { _index: "&foo.foo", _id: "foo", _source: {} }, + { _index: "&bar.bar", _id: "bar", _source: {} }, + ], + total: { value: 1000 }, }, + _scroll_id: "azerty", }); elasticsearch._getAliasFromIndice = sinon.stub(); @@ -389,10 +383,8 @@ describe("Test: ElasticSearch service", () => { ); elasticsearch._client.scroll.resolves({ - body: { - hits: { hits: [], total: { value: 1000 } }, - _scroll_id: "azerty", - }, + hits: { hits: [], total: { value: 1000 } }, + _scroll_id: "azerty", }); await elasticsearch.scroll("scroll-id", { scrollTTL: null }); @@ -446,48 +438,46 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.search.firstCall.args[0]).match({ index: "@&nyc-open-data.yellow-taxi,@&nyc-open-data.red-taxi,@&nyc-close-data.green-taxi,@&nyc-close-data.blue-taxi", - body: { query: { match_all: {} } }, + query: { match_all: {} }, from: undefined, size: undefined, scroll: undefined, - trackTotalHits: true, + track_total_hits: true, }); } }); it("should be able to search documents", async () => { elasticsearch._client.search.resolves({ - body: { - aggregations: { some: "aggregs" }, - body: searchBody, - hits: { - hits: [ - { - _id: "liia", - _index: indice, - _source: { country: "Nepal" }, - _score: 42, - highlight: "highlight", - inner_hits: { - inner_name: { - hits: { - hits: [ - { - _id: "nestedLiia", - _source: { city: "Kathmandu" }, - }, - ], - }, + ...searchBody, + aggregations: { some: "aggregs" }, + hits: { + hits: [ + { + _id: "liia", + _index: indice, + _source: { country: "Nepal" 
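// Editorial note: the v8 @elastic/elasticsearch client resolves every call with the
// response body itself rather than an ApiResponse envelope, which is why the fixtures
// in this test file drop the former `{ body: ... }` wrapper. A minimal sketch of the
// assumption, with `client` an ES 8 Client instance and names purely illustrative:
//   const response = await client.search({ index: "my-index", query: { match_all: {} } });
//   response.hits.hits; // was response.body.hits.hits with the v7 client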
}, + _score: 42, + highlight: "highlight", + inner_hits: { + inner_name: { + hits: { + hits: [ + { + _id: "nestedLiia", + _source: { city: "Kathmandu" }, + }, + ], }, }, - other: "thing", }, - ], - total: { value: 1 }, - }, - suggest: { some: "suggest" }, - _scroll_id: "i-am-scroll-id", + other: "thing", + }, + ], + total: { value: 1 }, }, + suggest: { some: "suggest" }, + _scroll_id: "i-am-scroll-id", }); elasticsearch._getAliasFromIndice = sinon.stub(); @@ -501,11 +491,11 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.search.firstCall.args[0]).match({ index: alias, - body: { query: { match_all: {} } }, + query: { match_all: {} }, from: undefined, size: undefined, scroll: undefined, - trackTotalHits: true, + track_total_hits: true, }); should(kuzzle.ask).calledWith( @@ -548,10 +538,8 @@ describe("Test: ElasticSearch service", () => { it("should be able to search with from/size and scroll arguments", async () => { elasticsearch._client.search.resolves({ - body: { - hits: { hits: [], total: { value: 0 } }, - _scroll_id: "i-am-scroll-id", - }, + hits: { hits: [], total: { value: 0 } }, + _scroll_id: "i-am-scroll-id", }); await elasticsearch.search( @@ -560,12 +548,12 @@ describe("Test: ElasticSearch service", () => { ); should(elasticsearch._client.search.firstCall.args[0]).match({ - body: searchBody, + ...searchBody, from: 0, index: alias, scroll: "30s", size: 1, - trackTotalHits: true, + track_total_hits: true, }); should(kuzzle.ask).calledWith( @@ -582,9 +570,7 @@ describe("Test: ElasticSearch service", () => { it("should be able to search on ES alias with invalid collection name", async () => { elasticsearch._client.search.resolves({ - body: { - hits: { hits: [], total: { value: 0 } }, - }, + hits: { hits: [], total: { value: 0 } }, }); await elasticsearch.search({ @@ -594,9 +580,9 @@ describe("Test: ElasticSearch service", () => { }); should(elasticsearch._client.search.firstCall.args[0]).match({ - body: searchBody, + ...searchBody, index: "@&main.kuzzleData", - trackTotalHits: true, + track_total_hits: true, }); }); @@ -625,9 +611,7 @@ describe("Test: ElasticSearch service", () => { it("should not save the scrollId in the cache if not present in response", async () => { elasticsearch._client.search.resolves({ - body: { - hits: { hits: [], total: { value: 0 } }, - }, + hits: { hits: [], total: { value: 0 } }, }); await elasticsearch.search({ index, collection, searchBody: {} }); @@ -654,11 +638,9 @@ describe("Test: ElasticSearch service", () => { describe("#get", () => { it("should allow getting a single document", () => { elasticsearch._client.get.resolves({ - body: { - _id: "liia", - _source: { city: "Kathmandu" }, - _version: 1, - }, + _id: "liia", + _source: { city: "Kathmandu" }, + _version: 1, }); const promise = elasticsearch.get(index, collection, "liia"); @@ -703,17 +685,15 @@ describe("Test: ElasticSearch service", () => { describe("#mGet", () => { it("should allow getting multiples documents", () => { elasticsearch._client.mget.resolves({ - body: { - docs: [ - { - _id: "liia", - found: true, - _source: { city: "Kathmandu" }, - _version: 1, - }, - { _id: "mhery", found: false }, - ], - }, + docs: [ + { + _id: "liia", + found: true, + _source: { city: "Kathmandu" }, + _version: 1, + }, + { _id: "mhery", found: false }, + ], }); const promise = elasticsearch.mGet(index, collection, ["liia", "mhery"]); @@ -753,21 +733,17 @@ describe("Test: ElasticSearch service", () => { describe("#mExists", () => { it("should allow getting multiples existing 
documents", () => { elasticsearch._client.mget.resolves({ - body: { - docs: [ - { _id: "foo", found: true }, - { _id: "bar", found: false }, - ], - }, + docs: [ + { _id: "foo", found: true }, + { _id: "bar", found: false }, + ], }); const promise = elasticsearch.mExists(index, collection, ["foo", "bar"]); return promise.then((result) => { should(elasticsearch._client.mget).be.calledWithMatch({ - body: { - docs: [{ _id: "foo" }, { _id: "bar" }], - }, + docs: [{ _id: "foo" }, { _id: "bar" }], index: alias, }); @@ -801,9 +777,7 @@ describe("Test: ElasticSearch service", () => { }, }; elasticsearch._client.count.resolves({ - body: { - count: 42, - }, + count: 42, }); const promise = elasticsearch.count(index, collection, filter); @@ -836,11 +810,9 @@ describe("Test: ElasticSearch service", () => { describe("#create", () => { it("should allow creating document an ID is provided", () => { elasticsearch._client.index.resolves({ - body: { - _id: "liia", - _version: 1, - _source: { city: "Kathmandu" }, - }, + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, }); const promise = elasticsearch.create( @@ -853,7 +825,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, - body: { + document: { city: "Kathmandu", _kuzzle_info: { author: "aschen", @@ -875,11 +847,9 @@ describe("Test: ElasticSearch service", () => { it("should create a document when no ID is provided", () => { elasticsearch._client.index.resolves({ - body: { - _id: "mehry", - _version: 1, - _source: { city: "Panipokari" }, - }, + _id: "mehry", + _version: 1, + _source: { city: "Panipokari" }, }); const promise = elasticsearch.create(index, collection, { @@ -889,7 +859,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, - body: { + document: { city: "Panipokari", _kuzzle_info: { author: null, @@ -910,12 +880,10 @@ describe("Test: ElasticSearch service", () => { describe("#createOrReplace", () => { beforeEach(() => { elasticsearch._client.index.resolves({ - body: { - _id: "liia", - _version: 1, - _source: { city: "Kathmandu" }, - result: "created", - }, + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + result: "created", }); }); @@ -931,7 +899,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, - body: { + document: { city: "Kathmandu", _kuzzle_info: { author: "aschen", @@ -965,7 +933,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, - body: { + document: { city: "Kathmandu", _kuzzle_info: undefined, }, @@ -1001,12 +969,10 @@ describe("Test: ElasticSearch service", () => { describe("#update", () => { beforeEach(() => { elasticsearch._client.update.resolves({ - body: { - _id: "liia", - _version: 1, - get: { - _source: { city: "Panipokari" }, - }, + _id: "liia", + _version: 1, + get: { + _source: { city: "Panipokari" }, }, }); }); @@ -1019,13 +985,11 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: null, - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, }, 
}, id: "liia", @@ -1056,13 +1020,11 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: "aschen", - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "aschen", }, }, id: "liia", @@ -1108,13 +1070,11 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: "oh noes", - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "oh noes", }, }, id: "liia", @@ -1129,13 +1089,11 @@ describe("Test: ElasticSearch service", () => { describe("#upsert", () => { beforeEach(() => { elasticsearch._client.update.resolves({ - body: { - _id: "liia", - _version: 2, - result: "updated", - get: { - _source: { city: "Panipokari" }, - }, + _id: "liia", + _version: 2, + result: "updated", + get: { + _source: { city: "Panipokari" }, }, }); }); @@ -1147,19 +1105,17 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: null, - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, }, - upsert: { - _kuzzle_info: { - author: null, - createdAt: timestamp, - }, + }, + upsert: { + _kuzzle_info: { + author: null, + createdAt: timestamp, }, }, id: "liia", @@ -1191,20 +1147,18 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: null, - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, }, - upsert: { - oh: "noes", - _kuzzle_info: { - author: null, - createdAt: timestamp, - }, + }, + upsert: { + oh: "noes", + _kuzzle_info: { + author: null, + createdAt: timestamp, }, }, id: "liia", @@ -1225,13 +1179,11 @@ describe("Test: ElasticSearch service", () => { it('should return the right "_created" result on a document creation', async () => { elasticsearch._client.update.resolves({ - body: { - _id: "liia", - _version: 1, - result: "created", - get: { - _source: { city: "Panipokari" }, - }, + _id: "liia", + _version: 1, + result: "created", + get: { + _source: { city: "Panipokari" }, }, }); @@ -1247,20 +1199,18 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: null, - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, }, - upsert: { - oh: "noes", - _kuzzle_info: { - author: null, - createdAt: timestamp, - }, + }, + upsert: { + oh: "noes", + _kuzzle_info: { + author: null, + createdAt: timestamp, }, }, id: "liia", @@ -1290,19 +1240,17 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: "aschen", - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "aschen", }, - upsert: { - _kuzzle_info: { - author: "aschen", - 
createdAt: timestamp, - }, + }, + upsert: { + _kuzzle_info: { + author: "aschen", + createdAt: timestamp, }, }, id: "liia", @@ -1344,19 +1292,17 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.update).be.calledWithMatch({ index: alias, - body: { - doc: { - city: "Panipokari", - _kuzzle_info: { - updatedAt: timestamp, - updater: "oh noes", - }, + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "oh noes", }, - upsert: { - _kuzzle_info: { - author: "oh noes", - createdAt: timestamp, - }, + }, + upsert: { + _kuzzle_info: { + author: "oh noes", + createdAt: timestamp, }, }, id: "liia", @@ -1371,13 +1317,11 @@ describe("Test: ElasticSearch service", () => { describe("#replace", () => { beforeEach(() => { elasticsearch._client.index.resolves({ - body: { - _id: "liia", - _version: 1, - _source: { city: "Kathmandu" }, - }, + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, }); - elasticsearch._client.exists.resolves({ body: true }); + elasticsearch._client.exists.resolves(true); }); it("should support replace capability", () => { @@ -1389,7 +1333,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, id: "liia", - body: { + document: { city: "Kathmandu", _kuzzle_info: { author: null, @@ -1422,7 +1366,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, id: "liia", - body: { + document: { city: "Kathmandu", _kuzzle_info: { author: "aschen", @@ -1443,7 +1387,7 @@ describe("Test: ElasticSearch service", () => { }); it("should throw a NotFoundError Exception if document already exists", () => { - elasticsearch._client.exists.resolves({ body: false }); + elasticsearch._client.exists.resolves(false); const promise = elasticsearch.replace(index, collection, "liia", { city: "Kathmandu", @@ -1554,7 +1498,7 @@ describe("Test: ElasticSearch service", () => { }); elasticsearch._client.indices.refresh.resolves({ - body: { _shards: 1 }, + _shards: 1, }); }); @@ -1648,15 +1592,13 @@ describe("Test: ElasticSearch service", () => { elasticsearch._getAllDocumentsFromQuery.restore(); elasticsearch._client.search.resolves({ - body: { - hits: { - hits: [], - total: { - value: 99999, - }, + hits: { + hits: [], + total: { + value: 99999, }, - _scroll_id: "foobar", }, + _scroll_id: "foobar", }); kuzzle.config.limits.documentsFetchCount = 2; @@ -1683,23 +1625,19 @@ describe("Test: ElasticSearch service", () => { }; request = { - body: { - query, - script: { - params: { bar: "foo" }, - source: "ctx._source.bar = params['bar'];", - }, + query, + script: { + params: { bar: "foo" }, + source: "ctx._source.bar = params['bar'];", }, index: alias, refresh: false, }; elasticsearch._client.updateByQuery.resolves({ - body: { - total: 42, - updated: 42, - failures: [], - }, + total: 42, + updated: 42, + failures: [], }); }); @@ -1749,11 +1687,9 @@ describe("Test: ElasticSearch service", () => { it("should reject if some failures occur", () => { elasticsearch._client.updateByQuery.resolves({ - body: { - total: 3, - updated: 2, - failures: [{ shardId: 42, reason: "error", foo: "bar" }], - }, + total: 3, + updated: 2, + failures: [{ shardId: 42, reason: "error", foo: "bar" }], }); const promise = elasticsearch.bulkUpdateByQuery( @@ -1779,11 +1715,17 @@ describe("Test: ElasticSearch service", () => { ]); elasticsearch._client.deleteByQuery.resolves({ - body: { - total: 2, - deleted: 1, - failures: [{ shardId: 42, reason: 
"error", foo: "bar" }], - }, + total: 2, + deleted: 1, + failures: [ + { + id: "_id2", + cause: { + reason: "error", + }, + foo: "bar", + }, + ], }); }); @@ -1794,16 +1736,16 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ index: alias, - body: { query: { filter: "term" } }, + query: { filter: "term" }, scroll: "5s", from: undefined, - size: 1000, + max_docs: 1000, refresh: undefined, }); should(elasticsearch._getAllDocumentsFromQuery).be.calledWithMatch({ index: alias, - body: { query: { filter: "term" } }, + query: { filter: "term" }, scroll: "5s", from: undefined, size: 1000, @@ -1817,7 +1759,7 @@ describe("Test: ElasticSearch service", () => { ], total: 2, deleted: 1, - failures: [{ shardId: 42, reason: "error" }], + failures: [{ id: "_id2", reason: "error" }], }); }); @@ -1831,15 +1773,15 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ index: alias, - body: { query: { filter: "term" } }, - size: 3, + query: { filter: "term" }, + max_docs: 3, refresh: true, }); should(result).match({ total: 2, deleted: 1, - failures: [{ shardId: 42, reason: "error" }], + failures: [{ id: "_id2", reason: "error" }], }); }); @@ -1853,10 +1795,10 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ index: alias, - body: { query: { filter: "term" } }, + query: { filter: "term" }, scroll: "5s", from: undefined, - size: 1000, + max_docs: 1000, refresh: undefined, }); @@ -1866,7 +1808,7 @@ describe("Test: ElasticSearch service", () => { documents: [], total: 2, deleted: 1, - failures: [{ shardId: 42, reason: "error" }], + failures: [{ id: "_id2", reason: "error" }], }); }); @@ -1902,15 +1844,13 @@ describe("Test: ElasticSearch service", () => { elasticsearch._getAllDocumentsFromQuery.restore(); elasticsearch._client.search.resolves({ - body: { - hits: { - hits: [], - total: { - value: 99999, - }, + hits: { + hits: [], + total: { + value: 99999, }, - _scroll_id: "foobar", }, + _scroll_id: "foobar", }); kuzzle.config.limits.documentsFetchCount = 2; @@ -1926,19 +1866,14 @@ describe("Test: ElasticSearch service", () => { describe("#deleteFields", () => { beforeEach(() => { elasticsearch._client.get.resolves({ - body: { - _id: "liia", - _version: 1, - _source: { city: "Kathmandu", useless: "somevalue" }, - }, + _id: "liia", + _version: 1, + _source: { city: "Kathmandu", useless: "somevalue" }, }); elasticsearch._client.index.resolves({ - body: { - _id: "liia", - _version: 2, - _source: { city: "Kathmandu" }, - }, + _id: "liia", + _version: 2, }); }); @@ -1956,7 +1891,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, id: "liia", - body: { + document: { city: "Kathmandu", _kuzzle_info: { updatedAt: timestamp, @@ -1992,7 +1927,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.index).be.calledWithMatch({ index: alias, id: "liia", - body: { + document: { city: "Kathmandu", _kuzzle_info: { updatedAt: timestamp, @@ -2065,13 +2000,13 @@ describe("Test: ElasticSearch service", () => { .onCall(1) .resolves(2); - elasticsearch._client.search.callsArgWith(1, null, { - body: { hits: hits1 }, + elasticsearch._client.search.resolves({ + hits: hits1, _scroll_id: "scroll-id", }); - elasticsearch._client.scroll.callsArgWith(1, null, { - body: { hits: hits2 }, + elasticsearch._client.scroll.resolves({ + hits: hits2, _scroll_id: 
"scroll-id", }); @@ -2084,9 +2019,9 @@ describe("Test: ElasticSearch service", () => { should(result).match([1, 2]); - should(elasticsearch._client.search.getCall(0).args[0]).match({ + should(elasticsearch._client.search.firstCall.args[0]).match({ index: alias, - body: { query: { match: 21 } }, + query: { match: 21 }, scroll: "5s", from: 0, size: 10, @@ -2113,9 +2048,10 @@ describe("Test: ElasticSearch service", () => { describe("#createIndex", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves({ - body: [{ alias: alias }, { alias: "@%nepali.liia" }], - }); + elasticsearch._client.cat.aliases.resolves([ + { alias: alias }, + { alias: "@%nepali.liia" }, + ]); sinon.stub(elasticsearch, "_createHiddenCollection").resolves(); }); @@ -2178,7 +2114,7 @@ describe("Test: ElasticSearch service", () => { sinon.stub(elasticsearch, "_hasHiddenCollection").resolves(false); sinon.stub(elasticsearch, "deleteCollection").resolves(); sinon.stub(elasticsearch, "_getAvailableIndice").resolves(indice); - sinon.stub(elasticsearch, "_getWaitForActiveShards").returns("1"); + sinon.stub(elasticsearch, "_getWaitForActiveShards").returns(1); }); afterEach(() => { @@ -2200,15 +2136,13 @@ describe("Test: ElasticSearch service", () => { }); should(elasticsearch._client.indices.create).be.calledWithMatch({ index: indice, - body: { - aliases: { [alias]: {} }, - mappings: { - dynamic: elasticsearch.config.commonMapping.dynamic, - _meta: elasticsearch.config.commonMapping._meta, - properties: mappings.properties, - }, - settings: { index: { blocks: { write: true } } }, + aliases: { [alias]: {} }, + mappings: { + dynamic: elasticsearch.config.commonMapping.dynamic, + _meta: elasticsearch.config.commonMapping._meta, + properties: mappings.properties, }, + settings: { index: { blocks: { write: true } } }, }); should(result).be.null(); @@ -2238,13 +2172,11 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.indices.create).be.calledWithMatch({ index: indice, - body: { - aliases: { [alias]: {} }, - mappings: { - dynamic: "true", - _meta: { some: "meta" }, - properties: elasticsearch.config.commonMapping.properties, - }, + aliases: { [alias]: {} }, + mappings: { + dynamic: "true", + _meta: { some: "meta" }, + properties: elasticsearch.config.commonMapping.properties, }, }); @@ -2434,7 +2366,7 @@ describe("Test: ElasticSearch service", () => { }, }; - should(esReq.body.mappings).eql(expectedMapping); + should(esReq.mappings).eql(expectedMapping); }); it("should reject if the index name is invalid", () => { @@ -2464,7 +2396,7 @@ describe("Test: ElasticSearch service", () => { await elasticsearch.createCollection(index, collection); const esReq = elasticsearch._client.indices.create.firstCall.args[0]; - should(esReq.body.settings).eql(elasticsearch.config.defaultSettings); + should(esReq.settings).eql(elasticsearch.config.defaultSettings); }); it("should use provided settings if provided", async () => { @@ -2481,7 +2413,7 @@ describe("Test: ElasticSearch service", () => { await elasticsearch.createCollection(index, collection, { settings }); const esReq = elasticsearch._client.indices.create.firstCall.args[0]; - should(esReq.body.settings).eql(settings); + should(esReq.settings).eql(settings); }); it("should use partially provided settings", async () => { @@ -2498,7 +2430,7 @@ describe("Test: ElasticSearch service", () => { const esReq = elasticsearch._client.indices.create.firstCall.args[0]; - should(esReq.body.settings).eql({ + should(esReq.settings).eql({ number_of_replicas: 1, 
number_of_shards: 66, }); @@ -2526,15 +2458,13 @@ describe("Test: ElasticSearch service", () => { describe("#getMapping", () => { beforeEach(() => { elasticsearch._client.indices.getMapping.resolves({ - body: { - [indice]: { - mappings: { - dynamic: true, - _meta: { lang: "npl" }, - properties: { - city: { type: "keyword" }, - _kuzzle_info: { properties: { author: { type: "keyword" } } }, - }, + [indice]: { + mappings: { + dynamic: true, + _meta: { lang: "npl" }, + properties: { + city: { type: "keyword" }, + _kuzzle_info: { properties: { author: { type: "keyword" } } }, }, }, }, @@ -2609,16 +2539,14 @@ describe("Test: ElasticSearch service", () => { beforeEach(() => { oldSettings = { - body: { - [indice]: { - settings: { - index: { - creation_date: Date.now(), - provided_name: "hello_world", - uuid: "some-u-u-i-d", - version: { no: 4242 }, - blocks: { write: false }, - }, + [indice]: { + settings: { + index: { + creation_date: Date.now(), + provided_name: "hello_world", + uuid: "some-u-u-i-d", + version: { no: 4242 }, + blocks: { write: false }, }, }, }, @@ -2774,12 +2702,10 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.indices.putMapping).be.calledWithMatch({ index: alias, - body: { - dynamic: "strict", - _meta: { meta: "data" }, - properties: { - name: { type: "keyword" }, - }, + dynamic: "strict", + _meta: { meta: "data" }, + properties: { + name: { type: "keyword" }, }, }); @@ -2845,10 +2771,8 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.indices.putMapping).be.calledWithMatch({ index: alias, - body: { - dynamic: "false", - _meta: { other: "meta" }, - }, + dynamic: "false", + _meta: { other: "meta" }, }); should(result).match({ @@ -2952,7 +2876,6 @@ describe("Test: ElasticSearch service", () => { await elasticsearch.updateSearchIndex(index, collection); should(elasticsearch._client.updateByQuery).be.calledWithMatch({ - body: {}, conflicts: "proceed", index: alias, refresh: true, @@ -2975,14 +2898,12 @@ describe("Test: ElasticSearch service", () => { elasticsearch.getMapping = sinon.stub().resolves(existingMapping); elasticsearch._client.indices.getSettings.resolves({ - body: { - "&nyc-open-data.yellow-taxi": { - settings: { - analysis: { - analyzers: { - custom_analyzer: { - type: "simple", - }, + "&nyc-open-data.yellow-taxi": { + settings: { + analysis: { + analyzers: { + custom_analyzer: { + type: "simple", }, }, }, @@ -3006,20 +2927,18 @@ describe("Test: ElasticSearch service", () => { }); should(elasticsearch._client.indices.create).be.calledWithMatch({ index: indice, - body: { - aliases: { [alias]: {} }, - mappings: { - dynamic: "false", - properties: { - name: { type: "keyword" }, - }, + aliases: { [alias]: {} }, + mappings: { + dynamic: "false", + properties: { + name: { type: "keyword" }, }, - settings: { - analysis: { - analyzers: { - custom_analyzer: { - type: "simple", - }, + }, + settings: { + analysis: { + analyzers: { + custom_analyzer: { + type: "simple", }, }, }, @@ -3260,14 +3179,12 @@ describe("Test: ElasticSearch service", () => { describe("#listCollections", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { alias: "@&nepali.mehry" }, - { alias: "@&nepali.liia" }, - { alias: "@&nyc-open-data.taxi" }, - { alias: "@&nepali._kuzzle_keep" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { alias: "@&nepali.mehry" }, + { alias: "@&nepali.liia" }, + { alias: "@&nyc-open-data.taxi" }, + { 
alias: "@&nepali._kuzzle_keep" }, + ]); }); it("should allow listing all available collections", () => { @@ -3279,13 +3196,11 @@ describe("Test: ElasticSearch service", () => { }); it("should not list unauthorized collections", () => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { alias: "@%nepali.mehry" }, - { alias: "@%nepali.liia" }, - { alias: "@%nyc-open-data.taxi" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { alias: "@%nepali.mehry" }, + { alias: "@%nepali.liia" }, + { alias: "@%nyc-open-data.taxi" }, + ]); const promise = elasticsearch.listCollections("nepali"); @@ -3307,13 +3222,11 @@ describe("Test: ElasticSearch service", () => { describe("#listIndexes", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { alias: "@&nepali.mehry" }, - { alias: "@&nepali.liia" }, - { alias: "@&nyc-open-data.taxi" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { alias: "@&nepali.mehry" }, + { alias: "@&nepali.liia" }, + { alias: "@&nyc-open-data.taxi" }, + ]); }); it("should allow listing all available indexes", () => { @@ -3329,14 +3242,12 @@ describe("Test: ElasticSearch service", () => { }); it("should not list unauthorized indexes", () => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { alias: "@%nepali.mehry" }, - { alias: "@%nepali.liia" }, - { alias: "@%nyc-open-data.taxi" }, - { alias: "@&vietnam.lfiduras" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { alias: "@%nepali.mehry" }, + { alias: "@%nepali.liia" }, + { alias: "@%nyc-open-data.taxi" }, + { alias: "@&vietnam.lfiduras" }, + ]); const promise = elasticsearch.listIndexes(); @@ -3358,13 +3269,11 @@ describe("Test: ElasticSearch service", () => { describe("#listAliases", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { index: "&nepalu.mehry", alias: "@&nepali.mehry" }, - { index: "&nepali.lia", alias: "@&nepali.liia" }, - { index: "&nyc-open-data.taxi", alias: "@&nyc-open-data.taxi" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { index: "&nepalu.mehry", alias: "@&nepali.mehry" }, + { index: "&nepali.lia", alias: "@&nepali.liia" }, + { index: "&nyc-open-data.taxi", alias: "@&nyc-open-data.taxi" }, + ]); }); it("should allow listing all available aliases", async () => { @@ -3397,14 +3306,12 @@ describe("Test: ElasticSearch service", () => { }); it("should not list unauthorized aliases", async () => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { index: "%nepalu.mehry", alias: "@%nepali.mehry" }, - { index: "%nepali.lia", alias: "@%nepali.liia" }, - { index: "%nyc-open-data.taxi", alias: "@%nyc-open-data.taxi" }, - { index: "&vietnam.lfiduras", alias: "@&vietnam.lfiduras" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { index: "%nepalu.mehry", alias: "@%nepali.mehry" }, + { index: "%nepali.lia", alias: "@%nepali.liia" }, + { index: "%nyc-open-data.taxi", alias: "@%nyc-open-data.taxi" }, + { index: "&vietnam.lfiduras", alias: "@&vietnam.lfiduras" }, + ]); const result = await elasticsearch.listAliases(); @@ -3431,14 +3338,12 @@ describe("Test: ElasticSearch service", () => { describe("#deleteIndexes", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { alias: "@&nepali.mehry", index: "&nepali.mehry" }, - { alias: "@&nepali.liia", index: "&nepali.liia" }, - { alias: "@&do-not.delete", index: "&do-not.delete" }, - { alias: "@&nyc-open-data.taxi", index: "&nyc-open-data.taxi" }, - ], - }); + 
elasticsearch._client.cat.aliases.resolves([ + { alias: "@&nepali.mehry", index: "&nepali.mehry" }, + { alias: "@&nepali.liia", index: "&nepali.liia" }, + { alias: "@&do-not.delete", index: "&do-not.delete" }, + { alias: "@&nyc-open-data.taxi", index: "&nyc-open-data.taxi" }, + ]); }); it("should allow to deletes multiple indexes", () => { @@ -3454,14 +3359,12 @@ describe("Test: ElasticSearch service", () => { }); it("should not delete unauthorized indexes", () => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { alias: "@&nepali.mehry", index: "&nepali.mehry" }, - { alias: "@&nepali.liia", index: "&nepali.liia" }, - { alias: "@&do-not.delete", index: "&do-not.delete" }, - { alias: "@%nyc-open-data.taxi", index: "%nyc-open-data.taxi" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { alias: "@&nepali.mehry", index: "&nepali.mehry" }, + { alias: "@&nepali.liia", index: "&nepali.liia" }, + { alias: "@&do-not.delete", index: "&do-not.delete" }, + { alias: "@%nyc-open-data.taxi", index: "%nyc-open-data.taxi" }, + ]); const promise = elasticsearch.deleteIndexes(["nepali", "nyc-open-data"]); @@ -3540,7 +3443,7 @@ describe("Test: ElasticSearch service", () => { describe("#refreshCollection", () => { it("should send a valid request to es client", () => { elasticsearch._client.indices.refresh.resolves({ - body: { _shards: "shards" }, + _shards: "shards", }); const promise = elasticsearch.refreshCollection(index, collection); @@ -3569,9 +3472,7 @@ describe("Test: ElasticSearch service", () => { describe("#exists", () => { it("should have document exists capability", () => { - elasticsearch._client.exists.resolves({ - body: true, - }); + elasticsearch._client.exists.resolves(true); const promise = elasticsearch.exists(index, collection, "liia"); @@ -3686,9 +3587,7 @@ describe("Test: ElasticSearch service", () => { it("should do a mGet request if we need to get some documents", () => { elasticsearch._client.mget.resolves({ - body: { - docs: [], - }, + docs: [], }); const promise = elasticsearch.mCreate( @@ -3700,12 +3599,12 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.mget).be.calledWithMatch({ index: alias, - body: { docs: [{ _id: "liia", _source: false }] }, + docs: [{ _id: "liia", _source: false }], }); const esRequest = { index: alias, - body: [ + operations: [ { index: { _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _index: alias } }, @@ -3730,9 +3629,7 @@ describe("Test: ElasticSearch service", () => { it("should reject already existing documents", () => { elasticsearch._client.mget.resolves({ - body: { - docs: [{ _id: "liia", found: true }], - }, + docs: [{ _id: "liia", found: true }], }); const promise = elasticsearch.mCreate( @@ -3744,12 +3641,12 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.mget).be.calledWithMatch({ index: alias, - body: { docs: [{ _id: "liia", _source: false }] }, + docs: [{ _id: "liia", _source: false }], }); const esRequest = { index: alias, - body: [ + operations: [ { index: { _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, ], @@ -3790,7 +3687,7 @@ describe("Test: ElasticSearch service", () => { const esRequest = { index: alias, - body: [ + operations: [ { index: { _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _index: alias } }, @@ -3827,7 +3724,7 @@ describe("Test: ElasticSearch service", () => { const esRequest = { index: alias, - body: [ + operations: [ 
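// Editorial note: the v8 bulk API renames the action/source array from `body` to
// `operations`, which is the shape the expected requests in these m* tests build.
// Minimal sketch (index name and document are illustrative):
//   await client.bulk({
//     index: "&nyc-open-data.yellow-taxi",
//     operations: [{ index: { _index: "&nyc-open-data.yellow-taxi" } }, { city: "Kathmandu" }],
//     refresh: "wait_for",
//   });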
{ index: { _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _index: alias } }, @@ -3886,7 +3783,7 @@ describe("Test: ElasticSearch service", () => { const esRequest = { index: alias, - body: [ + operations: [ { index: { _index: alias, _id: "mehry" } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _index: alias, _id: "liia" } }, @@ -3919,7 +3816,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { const esRequest = { index: alias, - body: [ + operations: [ { index: { _index: alias, _id: "mehry" } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _index: alias, _id: "liia" } }, @@ -3955,7 +3852,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { const esRequest = { index: alias, - body: [ + operations: [ { index: { _index: alias, _id: "mehry" } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _index: alias, _id: "liia" } }, @@ -3989,7 +3886,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { const esRequest = { index: alias, - body: [ + operations: [ { index: { _index: alias, _id: "mehry" } }, { city: "Kathmandu" }, { index: { _index: alias, _id: "liia" } }, @@ -4063,7 +3960,7 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { const esRequest = { index: alias, - body: [ + operations: [ { update: { _index: alias, @@ -4125,7 +4022,7 @@ describe("Test: ElasticSearch service", () => { return promise.then(() => { const esRequest = { index: alias, - body: [ + operations: [ { update: { _index: alias, _id: "mehry", retry_on_conflict: 2 } }, { doc: { city: "Kathmandu", ...kuzzleMeta }, _source: true }, { update: { _index: alias, _id: "liia", retry_on_conflict: 2 } }, @@ -4157,7 +4054,7 @@ describe("Test: ElasticSearch service", () => { return promise.then(() => { const esRequest = { index: alias, - body: [ + operations: [ { update: { _index: alias, @@ -4222,7 +4119,7 @@ describe("Test: ElasticSearch service", () => { }; esRequest = { - body: [ + operations: [ { update: { _index: alias, @@ -4322,7 +4219,7 @@ describe("Test: ElasticSearch service", () => { it("should handle default values for upserted documents", async () => { documents[1].default = { country: "Vietnam" }; - esRequest.body[3].upsert.country = "Vietnam"; + esRequest.operations[3].upsert.country = "Vietnam"; toImport[1]._source.default.country = "Vietnam"; const result = await elasticsearch.mUpsert(index, collection, documents); @@ -4353,8 +4250,8 @@ describe("Test: ElasticSearch service", () => { it("should allow additional options", async () => { kuzzleUpdateMeta._kuzzle_info.updater = "aschen"; kuzzleCreateMeta._kuzzle_info.author = "aschen"; - esRequest.body[0].update.retry_on_conflict = 42; - esRequest.body[2].update.retry_on_conflict = 42; + esRequest.operations[0].update.retry_on_conflict = 42; + esRequest.operations[2].update.retry_on_conflict = 42; esRequest.refresh = "wait_for"; esRequest.timeout = "10m"; @@ -4374,7 +4271,7 @@ describe("Test: ElasticSearch service", () => { it("should add documents without ID to rejected documents", async () => { documents[1] = { changes: { city: "Ho Chi Minh City" } }; - esRequest.body = esRequest.body.slice(0, 2); + esRequest.operations = esRequest.operations.slice(0, 2); toImport = toImport.slice(0, 1); const rejected = [ { @@ -4446,12 +4343,10 @@ describe("Test: ElasticSearch service", () => { elasticsearch._mExecute = sinon.stub().resolves(mExecuteResult); elasticsearch._client.mget.resolves({ - body: { - docs: [ - { _id: 
"mehry", found: true }, - { _id: "liia", found: true }, - ], - }, + docs: [ + { _id: "mehry", found: true }, + { _id: "liia", found: true }, + ], }); }); @@ -4461,18 +4356,16 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.mget).be.calledWithMatch({ index: alias, - body: { - docs: [ - { _id: "mehry", _source: false }, - { _id: "liia", _source: false }, - ], - }, + docs: [ + { _id: "mehry", _source: false }, + { _id: "liia", _source: false }, + ], }); const esRequest = { refresh: undefined, timeout: undefined, - body: [ + operations: [ { index: { _id: "mehry", _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _id: "liia", _index: alias } }, @@ -4495,12 +4388,10 @@ describe("Test: ElasticSearch service", () => { it("should add not found documents to rejected", () => { elasticsearch._client.mget.resolves({ - body: { - docs: [ - { _id: "mehry", found: true }, - { _id: "liia", found: false }, - ], - }, + docs: [ + { _id: "mehry", found: true }, + { _id: "liia", found: false }, + ], }); const promise = elasticsearch.mReplace(index, collection, documents); @@ -4508,18 +4399,16 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.mget).be.calledWithMatch({ index: alias, - body: { - docs: [ - { _id: "mehry", _source: false }, - { _id: "liia", _source: false }, - ], - }, + docs: [ + { _id: "mehry", _source: false }, + { _id: "liia", _source: false }, + ], }); const esRequest = { refresh: undefined, timeout: undefined, - body: [ + operations: [ { index: { _id: "mehry", _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, ], @@ -4553,9 +4442,7 @@ describe("Test: ElasticSearch service", () => { { body: { city: "Ho Chi Minh City" } }, ]; elasticsearch._client.mget.resolves({ - body: { - docs: [{ _id: "mehry", found: true }], - }, + docs: [{ _id: "mehry", found: true }], }); const promise = elasticsearch.mReplace(index, collection, documents); @@ -4563,15 +4450,13 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.mget).be.calledWithMatch({ index: alias, - body: { - docs: [{ _id: "mehry", _source: false }], - }, + docs: [{ _id: "mehry", _source: false }], }); const esRequest = { refresh: undefined, timeout: undefined, - body: [ + operations: [ { index: { _id: "mehry", _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, ], @@ -4609,7 +4494,7 @@ describe("Test: ElasticSearch service", () => { const esRequest = { refresh: "wait_for", timeout: "10m", - body: [ + operations: [ { index: { _id: "mehry", _index: alias } }, { city: "Kathmandu", ...kuzzleMeta }, { index: { _id: "liia", _index: alias } }, @@ -4643,15 +4528,13 @@ describe("Test: ElasticSearch service", () => { ]); elasticsearch._client.deleteByQuery.resolves({ - body: { - total: 2, - deleted: 2, - failures: [], - }, + total: 2, + deleted: 2, + failures: [], }); elasticsearch._client.indices.refresh.resolves({ - body: { _shards: 1 }, + _shards: 1, }); elasticsearch.mGet = sinon.stub().resolves({ @@ -4680,7 +4563,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ index: alias, - body: { query: { ids: { values: ["mehry", "liia"] } } }, + query: { ids: { values: ["mehry", "liia"] } }, scroll: "5s", }); @@ -4708,7 +4591,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ index: alias, - body: { query: { ids: { 
values: ["mehry"] } } }, + query: { ids: { values: ["mehry"] } }, scroll: "5s", }); @@ -4733,7 +4616,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ index: alias, - body: { query: { ids: { values: ["mehry"] } } }, + query: { ids: { values: ["mehry"] } }, scroll: "5s", }); @@ -4754,7 +4637,7 @@ describe("Test: ElasticSearch service", () => { return promise.then(() => { should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ index: alias, - body: { query: { ids: { values: ["mehry", "liia"] } } }, + query: { ids: { values: ["mehry", "liia"] } }, scroll: "5s", refresh: true, }); @@ -4768,7 +4651,7 @@ describe("Test: ElasticSearch service", () => { beforeEach(() => { esRequest = { refresh: undefined, - body: [ + operations: [ { index: { _index: alias, _id: "liia" } }, { city: "Kathmandu" }, { update: { _index: alias, _id: "mehry" } }, @@ -4790,28 +4673,26 @@ describe("Test: ElasticSearch service", () => { ]; elasticsearch._client.bulk.resolves({ - body: { - items: [ - { - index: { - _id: "liia", - status: 201, - _version: 1, - result: "created", - created: true, - foo: "bar", - }, + items: [ + { + index: { + _id: "liia", + status: 201, + _version: 1, + result: "created", + created: true, + foo: "bar", }, - { - index: { - _id: "mehry", - status: 400, - error: { reason: "bad request" }, - bar: "foo", - }, + }, + { + index: { + _id: "mehry", + status: 400, + error: { reason: "bad request" }, + bar: "foo", }, - ], - }, + }, + ], }); }); @@ -5015,13 +4896,11 @@ describe("Test: ElasticSearch service", () => { describe("#getSchema", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves({ - body: [ - { alias: "@&nepali.mehry" }, - { alias: "@&nepali._kuzzle_keep" }, - { alias: "@&istanbul._kuzzle_keep" }, - ], - }); + elasticsearch._client.cat.aliases.resolves([ + { alias: "@&nepali.mehry" }, + { alias: "@&nepali._kuzzle_keep" }, + { alias: "@&istanbul._kuzzle_keep" }, + ]); }); it("should returns the DB schema without hidden collections", async () => { @@ -5038,9 +4917,7 @@ describe("Test: ElasticSearch service", () => { const hiddenAlias = `@${hiddenIndice}`; beforeEach(() => { - elasticsearch._client.cat.aliases.resolves({ - body: [], - }); + elasticsearch._client.cat.aliases.resolves([]); sinon.stub(elasticsearch, "_getAvailableIndice").resolves(hiddenIndice); sinon.stub(elasticsearch, "_getWaitForActiveShards").returns(1); @@ -5057,12 +4934,10 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.indices.create).be.calledWithMatch({ index: hiddenIndice, - body: { - aliases: { [hiddenAlias]: {} }, - settings: { - number_of_shards: 1, - number_of_replicas: 1, - }, + aliases: { [hiddenAlias]: {} }, + settings: { + number_of_shards: 1, + number_of_replicas: 1, }, }); should(Mutex.prototype.lock).be.called(); @@ -5070,9 +4945,7 @@ describe("Test: ElasticSearch service", () => { }); it("does not create the hidden collection if it already exists", async () => { - elasticsearch._client.cat.aliases.resolves({ - body: [{ alias: hiddenAlias }], - }); + elasticsearch._client.cat.aliases.resolves([{ alias: hiddenAlias }]); await elasticsearch._createHiddenCollection("nisantasi"); @@ -5090,12 +4963,10 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.indices.create).be.calledWithMatch({ index: hiddenIndice, - body: { - aliases: { [hiddenAlias]: {} }, - settings: { - number_of_shards: 42, - number_of_replicas: 42, - }, + aliases: { [hiddenAlias]: {} }, + 
settings: { + number_of_shards: 42, + number_of_replicas: 42, }, }); should(Mutex.prototype.lock).be.called(); @@ -5109,12 +4980,10 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.indices.create).be.calledWithMatch({ index: hiddenIndice, - body: { - aliases: { [hiddenAlias]: {} }, - settings: { - number_of_shards: 1, - number_of_replicas: 1, - }, + aliases: { [hiddenAlias]: {} }, + settings: { + number_of_shards: 1, + number_of_replicas: 1, }, wait_for_active_shards: "all", }); @@ -5127,12 +4996,10 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._client.indices.create).be.calledWithMatch({ index: hiddenIndice, - body: { - aliases: { [hiddenAlias]: {} }, - settings: { - number_of_shards: 1, - number_of_replicas: 1, - }, + aliases: { [hiddenAlias]: {} }, + settings: { + number_of_shards: 1, + number_of_replicas: 1, }, wait_for_active_shards: 1, }); @@ -5247,8 +5114,8 @@ describe("Test: ElasticSearch service", () => { privateBody = [ { alias: "@%nepali.mehry", index: "%nepalu.mehry", filter: 0 }, ]; - publicES._client.cat.aliases.resolves({ body: publicBody }); - internalES._client.cat.aliases.resolves({ body: privateBody }); + publicES._client.cat.aliases.resolves(publicBody); + internalES._client.cat.aliases.resolves(privateBody); const publicIndice = await publicES._getIndice("nepali", "liia"); const internalIndice = await internalES._getIndice("nepali", "mehry"); @@ -5258,8 +5125,8 @@ describe("Test: ElasticSearch service", () => { }); it("throw if there is no indice associated with the alias", async () => { - publicES._client.cat.aliases.resolves({ body: [] }); - internalES._client.cat.aliases.resolves({ body: [] }); + publicES._client.cat.aliases.resolves([]); + internalES._client.cat.aliases.resolves([]); await should(publicES._getIndice("nepali", "liia")).be.rejectedWith({ id: "services.storage.unknown_index_collection", @@ -5279,8 +5146,8 @@ describe("Test: ElasticSearch service", () => { { alias: "@%nepali.mehry", index: "%nepalu.mehry", filter: 0 }, { alias: "@%nepali.mehry", index: "%nepali.mehry", filter: 0 }, ]; - publicES._client.cat.aliases.resolves({ body: publicBody }); - internalES._client.cat.aliases.resolves({ body: privateBody }); + publicES._client.cat.aliases.resolves(publicBody); + internalES._client.cat.aliases.resolves(privateBody); await should(publicES._getIndice("nepali", "liia")).be.rejectedWith({ id: "services.storage.multiple_indice_alias", @@ -5294,8 +5161,8 @@ describe("Test: ElasticSearch service", () => { describe("#_getAvailableIndice", () => { it("return simple indice whenever it is possible", async () => { - publicES._client.indices.exists.resolves({ body: false }); - internalES._client.indices.exists.resolves({ body: false }); + publicES._client.indices.exists.resolves(false); + internalES._client.indices.exists.resolves(false); const publicIndice = await publicES._getAvailableIndice( "nepali", @@ -5313,12 +5180,12 @@ describe("Test: ElasticSearch service", () => { it("return a suffixed indice if necessary (indice already taken)", async () => { publicES._client.indices.exists .onFirstCall() - .resolves({ body: true }) - .resolves({ body: false }); + .resolves(true) + .resolves(false); internalES._client.indices.exists .onFirstCall() - .resolves({ body: true }) - .resolves({ body: false }); + .resolves(true) + .resolves(false); const publicIndice = await publicES._getAvailableIndice( "nepali", @@ -5341,12 +5208,12 @@ describe("Test: ElasticSearch service", () => { 
"averyverylongcollectionwhichhasexactlythemaximumlengthacceptedofonehundredandtwentysixcharactersandthatswaytoolongdontyouthink"; publicES._client.indices.exists .onFirstCall() - .resolves({ body: true }) - .resolves({ body: false }); + .resolves(true) + .resolves(false); internalES._client.indices.exists .onFirstCall() - .resolves({ body: true }) - .resolves({ body: false }); + .resolves(true) + .resolves(false); const publicIndice = await publicES._getAvailableIndice( longIndex, @@ -5490,7 +5357,7 @@ describe("Test: ElasticSearch service", () => { it("should return all if an Elasticsearch cluster is used", async () => { elasticsearch._client.cat.nodes = sinon .stub() - .resolves({ body: ["node1", "node2"] }); + .resolves(["node1", "node2"]); const waitForActiveShards = await elasticsearch._getWaitForActiveShards(); @@ -5499,26 +5366,22 @@ describe("Test: ElasticSearch service", () => { }); it("should return 1 if a single node Elasticsearch cluster is used", async () => { - elasticsearch._client.cat.nodes = sinon - .stub() - .resolves({ body: ["node1"] }); + elasticsearch._client.cat.nodes = sinon.stub().resolves(["node1"]); const waitForActiveShards = await elasticsearch._getWaitForActiveShards(); - should(waitForActiveShards).be.eql("1"); + should(waitForActiveShards).be.eql(1); }); }); describe("#generateMissingAliases", () => { - const indicesBody = { - body: [ - { index: "&nepali.liia", status: "open" }, - { index: "%nepali.liia", status: "open" }, - { index: "&nepali.mehry", status: "open" }, - { index: "%nepali.mehry", status: "open" }, - ], - }; + const indicesBody = [ + { index: "&nepali.liia", status: "open" }, + { index: "%nepali.liia", status: "open" }, + { index: "&nepali.mehry", status: "open" }, + { index: "%nepali.mehry", status: "open" }, + ]; let aliasesList = [ { alias: "@&nepali.lia", From a18d454b36d5fe565ad6b08a772c13c3e6a16bab Mon Sep 17 00:00:00 2001 From: Kuruyia <8174691+Kuruyia@users.noreply.github.com> Date: Fri, 5 Jan 2024 17:45:25 +0100 Subject: [PATCH 08/59] feat(storage): add more types, fix some requests --- lib/service/storage/elasticsearch.ts | 28 ++++++++++------------ test/service/storage/elasticsearch.test.js | 14 +++++------ 2 files changed, 19 insertions(+), 23 deletions(-) diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 9c9188282f..482822c97d 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -256,7 +256,7 @@ export default class ElasticSearch extends Service { * @returns {Promise.} */ async stats() { - const esRequest = { + const esRequest: estypes.IndicesStatsRequest = { metric: ["docs", "store"], }; @@ -610,7 +610,7 @@ export default class ElasticSearch extends Service { * @returns {Promise.<{ _id, _version, _source }>} */ async get(index, collection, id) { - const esRequest = { + const esRequest: estypes.GetRequest = { id, index: this._getAlias(index, collection), }; @@ -653,13 +653,11 @@ export default class ElasticSearch extends Service { return { errors: [], item: [] }; } - const esRequest = { - body: { - docs: ids.map((_id) => ({ - _id, - _index: this._getAlias(index, collection), - })), - }, + const esRequest: estypes.MgetRequest = { + docs: ids.map((_id) => ({ + _id, + _index: this._getAlias(index, collection), + })), }; debug("Multi-get documents: %o", esRequest); @@ -700,8 +698,8 @@ export default class ElasticSearch extends Service { * @returns {Promise.} count */ async count(index: string, collection: string, searchBody = {}) { - const 
esRequest = { - body: this._sanitizeSearchBody(searchBody), + const esRequest: estypes.CountRequest = { + ...this._sanitizeSearchBody(searchBody), index: this._getAlias(index, collection), }; @@ -1085,7 +1083,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; } = {}, ) { - const esRequest = { + const esRequest: estypes.DeleteRequest = { id, index: this._getAlias(index, collection), refresh, @@ -1279,8 +1277,8 @@ export default class ElasticSearch extends Service { } = {}, ) { try { - const esRequest = { - body: this._sanitizeSearchBody({ query }), + const esRequest: estypes.SearchRequest = { + ...this._sanitizeSearchBody({ query }), index: this._getAlias(index, collection), scroll: "5s", size, @@ -1678,7 +1676,7 @@ export default class ElasticSearch extends Service { settings?: Record; } = {}, ) { - const esRequest = { + const esRequest: estypes.IndicesGetSettingsRequest = { index: await this._getIndice(index, collection), }; diff --git a/test/service/storage/elasticsearch.test.js b/test/service/storage/elasticsearch.test.js index 1db27690b5..8f80c8a68e 100644 --- a/test/service/storage/elasticsearch.test.js +++ b/test/service/storage/elasticsearch.test.js @@ -700,12 +700,10 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.mget).be.calledWithMatch({ - body: { - docs: [ - { _id: "liia", _index: alias }, - { _id: "mhery", _index: alias }, - ], - }, + docs: [ + { _id: "liia", _index: alias }, + { _id: "mhery", _index: alias }, + ], }); should(result).match({ @@ -784,8 +782,8 @@ describe("Test: ElasticSearch service", () => { return promise.then((result) => { should(elasticsearch._client.count).be.calledWithMatch({ + ...filter, index: alias, - body: filter, }); should(result).be.eql(42); @@ -1564,7 +1562,7 @@ describe("Test: ElasticSearch service", () => { should(elasticsearch._getAllDocumentsFromQuery).be.calledWithMatch({ index: alias, - body: { query: { filter: "term" } }, + query: { filter: "term" }, scroll: "5s", size: 3, }); From e472d9e8ed83b3f65d2f014781b964b2dc774541 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 16 Jan 2024 15:22:05 +0100 Subject: [PATCH 09/59] chore(elasticsearch): type elasticsearch.ts file --- lib/service/storage/elasticsearch.ts | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 482822c97d..9ea3daf41c 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -105,12 +105,7 @@ export default class ElasticSearch extends Service { public scrollTTL: number; public _config: any; - /** - * Returns a new elasticsearch client instance - * - * @returns {Object} - */ - static buildClient(config: ClientOptions) { + static buildClient(config: ClientOptions): StorageClient { return new StorageClient(config); } From bdcce96fc97ff58143f29484ae1da2076ee2e885 Mon Sep 17 00:00:00 2001 From: rolljee Date: Thu, 18 Jan 2024 11:04:56 +0100 Subject: [PATCH 10/59] feat(es8): elasticsearch 8 unit tests & functional tests running --- .ci/services.yml | 14 +- .ci/test-cluster.yml | 14 +- docker-compose.yml | 10 +- features/support/hooks.js | 24 +-- lib/service/storage/elasticsearch.ts | 225 +++++++++++---------- lib/service/storage/esWrapper.js | 4 +- lib/types/storage/Elasticsearch.ts | 27 +++ test/service/storage/elasticsearch.test.js | 8 +- 8 files changed, 192 insertions(+), 134 deletions(-) diff --git a/.ci/services.yml b/.ci/services.yml 
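# Editorial note: Elasticsearch 8 images ship with X-Pack security (TLS and
# authentication) enabled by default, so the CI and dev services below disable it
# (xpack.security.enabled=false) and force single-node discovery to keep the test
# stack reachable over plain HTTP, as the previous kuzzleio/elasticsearch:7 image
# was. indices.id_field_data.enabled=true is presumably needed because some
# requests sort or aggregate on _id, which ES 8 forbids by default.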
index f191b2e2e4..2ef4d93014 100644 --- a/.ci/services.yml +++ b/.ci/services.yml @@ -26,6 +26,14 @@ services: image: redis:6 elasticsearch: - image: kuzzleio/elasticsearch:7 - ulimits: - nofile: 65536 + image: elasticsearch:8.11.3 + environment: + - xpack.security.enabled=false + - action.destructive_requires_name=false + - cluster.name=kuzzle + - node.name=alyx + - discovery.type=single-node + - ingest.geoip.downloader.enabled=false + - indices.id_field_data.enabled=true + ports: + - '9200:9200' diff --git a/.ci/test-cluster.yml b/.ci/test-cluster.yml index 9fe99e878a..ae0dc6467a 100644 --- a/.ci/test-cluster.yml +++ b/.ci/test-cluster.yml @@ -67,6 +67,14 @@ services: image: redis:6 elasticsearch: - image: kuzzleio/elasticsearch:7 - ulimits: - nofile: 65536 + image: elasticsearch:8.11.3 + environment: + - xpack.security.enabled=false + - action.destructive_requires_name=false + - cluster.name=kuzzle + - node.name=alyx + - discovery.type=single-node + - ingest.geoip.downloader.enabled=false + - indices.id_field_data.enabled=true + ports: + - '9200:9200' \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 4a9f3b7ac4..b8408570d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -111,8 +111,16 @@ services: retries: 30 elasticsearch: - image: kuzzleio/elasticsearch:7 + image: elasticsearch:8.11.3 container_name: kuzzle_elasticsearch + environment: + - xpack.security.enabled=false + - action.destructive_requires_name=false + - cluster.name=kuzzle + - node.name=alyx + - discovery.type=single-node + - ingest.geoip.downloader.enabled=false + - indices.id_field_data.enabled=true ports: - '9200:9200' healthcheck: diff --git a/features/support/hooks.js b/features/support/hooks.js index 1982416528..b9e2e1a518 100644 --- a/features/support/hooks.js +++ b/features/support/hooks.js @@ -8,23 +8,23 @@ const { After, Before, BeforeAll } = require("cucumber"), async function resetSecurityDefault(sdk) { await sdk.query({ - controller: "admin", action: "resetSecurity", + controller: "admin", refresh: "wait_for", }); sdk.jwt = null; await sdk.query({ - controller: "admin", action: "loadSecurities", body: testPermissions, + controller: "admin", refresh: "wait_for", }); await sdk.auth.login("local", { - username: "test-admin", password: "password", + username: "test-admin", }); } @@ -42,9 +42,9 @@ BeforeAll({ timeout: 10 * 1000 }, async function () { console.log("Loading default permissions.."); await world.sdk.query({ - controller: "admin", action: "loadSecurities", body: testPermissions, + controller: "admin", onExistingUsers: "overwrite", refresh: "wait_for", }); @@ -56,15 +56,15 @@ Before({ timeout: 10 * 1000 }, async function () { await this.sdk.connect(); await this.sdk.auth.login("local", { - username: "test-admin", password: "password", + username: "test-admin", }); }); Before({ tags: "not @preserveDatabase" }, async function () { await this.sdk.query({ - controller: "admin", action: "resetDatabase", + controller: "admin", refresh: "wait_for", }); }); @@ -106,8 +106,8 @@ Before({ tags: "@not-http" }, async function () { Before({ tags: "@firstAdmin" }, async function () { await this.sdk.query({ - controller: "admin", action: "resetSecurity", + controller: "admin", refresh: "wait_for", }); @@ -128,16 +128,16 @@ After({ tags: "@security", timeout: 60 * 1000 }, async function () { Before({ tags: "@mappings" }, async function () { await this.sdk.query({ - controller: "admin", action: "loadMappings", body: testMappings, + controller: "admin", refresh: "wait_for", 
}); await this.sdk.query({ - controller: "admin", action: "loadFixtures", body: testFixtures, + controller: "admin", refresh: "wait_for", }); }); @@ -146,13 +146,13 @@ Before({ tags: "@mappings" }, async function () { After({ tags: "@events" }, async function () { await this.sdk.query({ - controller: "functional-test-plugin/pipes", action: "deactivateAll", + controller: "functional-test-plugin/pipes", }); await this.sdk.query({ - controller: "pipes", action: "deactivateAll", + controller: "pipes", }); }); @@ -160,8 +160,8 @@ After({ tags: "@events" }, async function () { After({ tags: "@login" }, async function () { await this.sdk.auth.login("local", { - username: "test-admin", password: "password", + username: "test-admin", }); }); diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 9ea3daf41c..144e2ec502 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -32,6 +32,9 @@ import { KImportError, KRequestBody, KRequestParams, + KStats, + KStatsIndexes, + KUpdateResponse, } from "../../types/storage/Elasticsearch"; import assert from "assert"; @@ -177,7 +180,7 @@ export default class ElasticSearch extends Service { "Your dynamic mapping policy is set to 'true' for new fields.", "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n"), + ].join("\n") ); } @@ -194,7 +197,7 @@ export default class ElasticSearch extends Service { "services", "storage", "version_mismatch", - version.number, + version.number ); } @@ -215,34 +218,29 @@ export default class ElasticSearch extends Service { * Returns some basic information about this service * @override * - * @returns {Promise.} service informations + * @returns {Promise.} service informations */ - info() { + async info(): Promise { const result: InfoResult = { type: "elasticsearch", version: this._esVersion, }; - return this._client - .info() - .then((body) => { - result.version = body.version.number; - result.lucene = body.version.lucene_version; - - return this._client.cluster.health(); - }) - .then((body) => { - result.status = body.status; + try { + const info = await this._client.info(); + result.version = info.version.number; + result.lucene = info.version.lucene_version; - return this._client.cluster.stats({ human: true }); - }) - .then((body) => { - result.spaceUsed = body.indices.store.size; - result.nodes = body.nodes; + const health = await this._client.cluster.health(); + result.status = health.status; - return result; - }) - .catch((error) => this._esWrapper.reject(error)); + const stats = await this._client.cluster.stats({ human: true }); + result.spaceUsed = stats.indices.store.size; + result.nodes = stats.nodes; + return result; + } catch (error) { + return this._esWrapper.reject(error); + } } /** @@ -250,16 +248,16 @@ export default class ElasticSearch extends Service { * * @returns {Promise.} */ - async stats() { + async stats(): Promise { const esRequest: estypes.IndicesStatsRequest = { metric: ["docs", "store"], }; - const body = await this._client.indices.stats(esRequest); - const indexes = {}; + const stats = await this._client.indices.stats(esRequest); + const indexes: KStatsIndexes = {}; let size = 0; - for (const [indice, indiceInfo] of Object.entries(body.indices)) { + for (const [indice, indiceInfo] of Object.entries(stats.indices)) { const infos = 
indiceInfo as any; // Ignore non-Kuzzle indices if ( @@ -288,11 +286,13 @@ export default class ElasticSearch extends Service { size: 0, }; } + indexes[indexName].collections.push({ documentCount: infos.total.docs.count, name: collectionName, size: infos.total.store.size_in_bytes, }); + indexes[indexName].size += infos.total.store.size_in_bytes; size += infos.total.store.size_in_bytes; } @@ -333,14 +333,14 @@ export default class ElasticSearch extends Service { "services", "storage", "scroll_duration_too_great", - _scrollTTL, + _scrollTTL ); } } const stringifiedScrollInfo = await global.kuzzle.ask( "core:cache:internal:get", - cacheKey, + cacheKey ); if (!stringifiedScrollInfo) { @@ -366,7 +366,7 @@ export default class ElasticSearch extends Service { JSON.stringify(scrollInfo), { ttl: ms(_scrollTTL) || this.scrollTTL, - }, + } ); } @@ -408,7 +408,7 @@ export default class ElasticSearch extends Service { from?: number; size?: number; scroll?: string; - } = {}, + } = {} ) { let esIndexes: any; @@ -444,7 +444,7 @@ export default class ElasticSearch extends Service { "services", "storage", "scroll_duration_too_great", - scroll, + scroll ); } } @@ -470,7 +470,7 @@ export default class ElasticSearch extends Service { index, targets, }), - { ttl }, + { ttl } ); remaining = totalHitsValue - body.hits.hits.length; @@ -482,6 +482,7 @@ export default class ElasticSearch extends Service { targets, }); } catch (error) { + console.error(error); throw this._esWrapper.formatESError(error); } } @@ -513,7 +514,7 @@ export default class ElasticSearch extends Service { async _formatSearchResult( body: any, remaining?: number, - searchInfo: any = {}, + searchInfo: any = {} ) { let aliasToTargets = {}; const aliasCache = new Map(); @@ -574,7 +575,7 @@ export default class ElasticSearch extends Service { for (const [name, innerHit] of Object.entries(innerHits)) { formattedInnerHits[name] = await Bluebird.map( (innerHit as any).hits.hits, - formatHit, + formatHit ); } return formattedInnerHits; @@ -733,7 +734,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { assertIsObject(content); @@ -797,7 +798,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { const esRequest: estypes.IndexRequest> = { document: content, @@ -861,13 +862,13 @@ export default class ElasticSearch extends Service { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {}, - ) { + } = {} + ): Promise { const esRequest: estypes.UpdateRequest< KRequestBody, KRequestBody > = { - _source: "true", + _source: true, doc: content, id, index: this._getAlias(index, collection), @@ -892,6 +893,7 @@ export default class ElasticSearch extends Service { try { const body = await this._client.update(esRequest); + return { _id: body._id, _source: body.get._source, @@ -931,13 +933,13 @@ export default class ElasticSearch extends Service { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { const esRequest: estypes.UpdateRequest< KRequestBody, KRequestBody > = { - _source: "true", + _source: true, doc: content, id, index: this._getAlias(index, collection), @@ -1007,7 +1009,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const 
esRequest: estypes.IndexRequest> = { @@ -1040,7 +1042,7 @@ export default class ElasticSearch extends Service { "not_found", id, index, - collection, + collection ); } @@ -1076,7 +1078,7 @@ export default class ElasticSearch extends Service { refresh, }: { refresh?: boolean | "wait_for"; - } = {}, + } = {} ) { const esRequest: estypes.DeleteRequest = { id, @@ -1124,7 +1126,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; size?: number; fetch?: boolean; - } = {}, + } = {} ) { const esRequest = { ...this._sanitizeSearchBody({ query }), @@ -1150,10 +1152,16 @@ export default class ElasticSearch extends Service { esRequest.refresh = refresh === "wait_for" ? true : refresh; - const body = await this._client.deleteByQuery({ + const request = { ...esRequest, max_docs: size, - }); + }; + + if (request.max_docs === -1) { + request.max_docs = undefined; + } + + const body = await this._client.deleteByQuery(request); return { deleted: body.deleted, @@ -1191,7 +1199,7 @@ export default class ElasticSearch extends Service { }: { refresh?: boolean | "wait_for"; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const esRequest: estypes.GetRequest = { @@ -1269,7 +1277,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; size?: number; userId?: string; - } = {}, + } = {} ) { try { const esRequest: estypes.SearchRequest = { @@ -1292,7 +1300,7 @@ export default class ElasticSearch extends Service { index, collection, documents, - { refresh, userId }, + { refresh, userId } ); return { @@ -1324,7 +1332,7 @@ export default class ElasticSearch extends Service { refresh = false, }: { refresh?: boolean; - } = {}, + } = {} ) { const script = { params: {}, @@ -1366,7 +1374,7 @@ export default class ElasticSearch extends Service { "storage", "incomplete_update", response.updated, - errors, + errors ); } @@ -1398,7 +1406,7 @@ export default class ElasticSearch extends Service { }: { size?: number; scrollTTl?: string; - } = {}, + } = {} ): Promise { const esRequest: estypes.SearchRequest = { ...this._sanitizeSearchBody({ query }), @@ -1475,7 +1483,7 @@ export default class ElasticSearch extends Service { "storage", "index_already_exists", indexType, - index, + index ); } } @@ -1504,7 +1512,7 @@ export default class ElasticSearch extends Service { }: { mappings?: estypes.MappingTypeMapping; settings?: Record; - } = {}, + } = {} ) { this._assertValidIndexAndCollection(index, collection); @@ -1513,7 +1521,7 @@ export default class ElasticSearch extends Service { "services", "storage", "collection_reserved", - HIDDEN_COLLECTION, + HIDDEN_COLLECTION ); } @@ -1554,7 +1562,7 @@ export default class ElasticSearch extends Service { dynamic: mappings.dynamic || this._config.commonMapping.dynamic, properties: _.merge( mappings.properties, - this._config.commonMapping.properties, + this._config.commonMapping.properties ), }; @@ -1625,7 +1633,7 @@ export default class ElasticSearch extends Service { includeKuzzleMeta = false, }: { includeKuzzleMeta?: boolean; - } = {}, + } = {} ) { const indice = await this._getIndice(index, collection); const esRequest: estypes.IndicesGetMappingRequest = { @@ -1669,7 +1677,7 @@ export default class ElasticSearch extends Service { }: { mappings?: estypes.MappingTypeMapping; settings?: Record; - } = {}, + } = {} ) { const esRequest: estypes.IndicesGetSettingsRequest = { index: await this._getIndice(index, collection), @@ -1774,7 +1782,7 @@ export default class ElasticSearch extends 
Service { async updateMapping( index: string, collection: string, - mappings: estypes.MappingTypeMapping = {}, + mappings: estypes.MappingTypeMapping = {} ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { let esRequest: estypes.IndicesPutMappingRequest = { index: this._getAlias(index, collection), @@ -1805,7 +1813,7 @@ export default class ElasticSearch extends Service { const fullProperties = _.merge( collectionMappings.properties, - mappings.properties, + mappings.properties ); return { @@ -1907,15 +1915,16 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const dateNow = Date.now(); - const esRequest = { - body: documents, + const esRequest: estypes.BulkRequest = { + operations: documents, refresh, timeout, }; + const kuzzleMeta = { created: { author: getKuid(userId), @@ -1934,16 +1943,14 @@ export default class ElasticSearch extends Service { this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta); - let response: Record; + let body: estypes.BulkResponse; try { - response = await this._client.bulk(esRequest); + body = await this._client.bulk(esRequest); } catch (error) { throw this._esWrapper.formatESError(error); } - const body = response.body; - const result = { errors: [], items: [], @@ -2069,7 +2076,7 @@ export default class ElasticSearch extends Service { for (const [index, collections] of Object.entries(schema)) { schema[index] = (collections as string[]).filter( - (c) => c !== HIDDEN_COLLECTION, + (c) => c !== HIDDEN_COLLECTION ); } @@ -2170,7 +2177,7 @@ export default class ElasticSearch extends Service { return request; }, - { index: [] }, + { index: [] } ); if (esRequest.index.length === 0) { @@ -2239,7 +2246,7 @@ export default class ElasticSearch extends Service { async exists( index: string, collection: string, - id: string, + id: string ): Promise { const esRequest: estypes.ExistsRequest = { id, @@ -2367,7 +2374,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2467,7 +2474,7 @@ export default class ElasticSearch extends Service { injectKuzzleMeta = true, limits = true, source = true, - }: KRequestParams = {}, + }: KRequestParams = {} ) { let kuzzleMeta = {}; @@ -2491,7 +2498,7 @@ export default class ElasticSearch extends Service { }; const { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta, + kuzzleMeta ); esRequest.operations = []; @@ -2539,7 +2546,7 @@ export default class ElasticSearch extends Service { retryOnConflict = 0, timeout = undefined, userId = null, - } = {}, + } = {} ) { const alias = this._getAlias(index, collection), toImport = [], @@ -2557,7 +2564,7 @@ export default class ElasticSearch extends Service { }, { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta, + kuzzleMeta ); /** @@ -2640,7 +2647,7 @@ export default class ElasticSearch extends Service { retryOnConflict?: number; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const esRequest: estypes.BulkRequest = { @@ -2672,7 +2679,7 @@ export default class ElasticSearch extends Service { { prepareMUpsert: true, requireId: true, - }, + } ); /** @@ -2694,7 +2701,7 @@ export default class ElasticSearch extends Service { { doc: 
extractedDocuments[i]._source.changes, upsert: extractedDocuments[i]._source.default, - }, + } ); // _source: true // Makes ES return the updated document source in the response. @@ -2705,7 +2712,7 @@ export default class ElasticSearch extends Service { const response = await this._mExecute( esRequest, extractedDocuments, - rejected, + rejected ); // with _source: true, ES returns the updated document in @@ -2747,7 +2754,7 @@ export default class ElasticSearch extends Service { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2839,7 +2846,7 @@ export default class ElasticSearch extends Service { }: { refresh?: boolean | "wait_for"; timeout?: number; - } = {}, + } = {} ) { const query = { ids: { values: [] } }; const validIds = []; @@ -2917,7 +2924,7 @@ export default class ElasticSearch extends Service { esRequest: estypes.BulkRequest, documents: JSONObject[], partialErrors: JSONObject[] = [], - { limits = true, source = true } = {}, + { limits = true, source = true } = {} ) { assertWellFormedRefresh(esRequest); @@ -2998,7 +3005,7 @@ export default class ElasticSearch extends Service { _extractMDocuments( documents: JSONObject[], metadata: JSONObject, - { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}, + { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} ) { const rejected = []; const extractedDocuments = []; @@ -3047,7 +3054,7 @@ export default class ElasticSearch extends Service { metadata, document, extractedDocuments, - documentsToGet, + documentsToGet ); } } @@ -3069,7 +3076,7 @@ export default class ElasticSearch extends Service { metadata: JSONObject, document: JSONObject, extractedDocuments: JSONObject[], - documentsToGet: JSONObject[], + documentsToGet: JSONObject[] ) { let extractedDocument; @@ -3082,7 +3089,7 @@ export default class ElasticSearch extends Service { {}, metadata.upsert, document.changes, - document.default, + document.default ), }, }; @@ -3129,7 +3136,7 @@ export default class ElasticSearch extends Service { "storage", "invalid_mapping", currentPath, - didYouMean(property, mappingProperties), + didYouMean(property, mappingProperties) ); } @@ -3190,7 +3197,7 @@ export default class ElasticSearch extends Service { "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, - '"indices"', + '"indices"' ); } @@ -3205,7 +3212,7 @@ export default class ElasticSearch extends Service { * @private */ async _getSettings( - esRequest: estypes.IndicesGetSettingsRequest, + esRequest: estypes.IndicesGetSettingsRequest ): Promise { const response = await this._client.indices.getSettings(esRequest); const index = esRequest.index as string; @@ -3224,10 +3231,10 @@ export default class ElasticSearch extends Service { */ async _getAvailableIndice( index: string, - collection: string, + collection: string ): Promise { let indice = this._getAlias(index, collection).substring( - INDEX_PREFIX_POSITION_IN_ALIAS, + INDEX_PREFIX_POSITION_IN_ALIAS ); if (!(await this._client.indices.exists({ index: indice }))) { @@ -3264,9 +3271,9 @@ export default class ElasticSearch extends Service { * @throws If there is not exactly one alias associated that is prefixed with @ */ async _getAliasFromIndice(indice) { - const { body } = await this._client.indices.getAlias({ index: indice }); + const body = await this._client.indices.getAlias({ index: indice }); const aliases = Object.keys(body[indice].aliases).filter((alias) => - 
alias.startsWith(ALIAS_PREFIX), + alias.startsWith(ALIAS_PREFIX) ); if (aliases.length < 1) { @@ -3292,7 +3299,7 @@ export default class ElasticSearch extends Service { const indicesWithoutAlias = indices.filter( (indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice), + !aliases.some((alias) => alias.indice === indice) ); const esRequest = { body: { actions: [] } }; @@ -3326,7 +3333,7 @@ export default class ElasticSearch extends Service { "services", "storage", "invalid_collection_name", - collection, + collection ); } } @@ -3341,7 +3348,7 @@ export default class ElasticSearch extends Service { _extractIndex(alias) { return alias.substr( INDEX_PREFIX_POSITION_IN_ALIAS + 1, - alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1, + alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1 ); } @@ -3481,7 +3488,7 @@ export default class ElasticSearch extends Service { _id: h._id, _source: h._source, body: {}, - })), + })) ); } @@ -3532,7 +3539,7 @@ export default class ElasticSearch extends Service { "services", "storage", "invalid_query_keyword", - `${key}.${scriptArg}`, + `${key}.${scriptArg}` ); } } @@ -3587,14 +3594,14 @@ export default class ElasticSearch extends Service { assert( typeof configValue === "string", - `services.storageEngine.${key} must be a string.`, + `services.storageEngine.${key} must be a string.` ); const parsedValue = ms(configValue); assert( typeof parsedValue === "number", - `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`, + `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` ); return parsedValue; @@ -3647,7 +3654,7 @@ export default class ElasticSearch extends Service { esState = esStateEnum.OK; } else { global.kuzzle.log.info( - `[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining`, + `[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining` ); await Bluebird.delay(1000); } @@ -3672,7 +3679,7 @@ export default class ElasticSearch extends Service { "storage", "invalid_mapping", path, - "Dynamic property value should be a string.", + "Dynamic property value should be a string." ); } @@ -3683,8 +3690,8 @@ export default class ElasticSearch extends Service { "invalid_mapping", path, `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join( - '", "', - )}"`, + '", "' + )}"` ); } } @@ -3693,7 +3700,7 @@ export default class ElasticSearch extends Service { _setLastActionToKuzzleMeta( esRequest: JSONObject, alias: string, - kuzzleMeta: JSONObject, + kuzzleMeta: JSONObject ) { /** * @warning Critical code section @@ -3703,8 +3710,8 @@ export default class ElasticSearch extends Service { let lastAction = ""; const actionNames = ["index", "create", "update", "delete"]; - for (let i = 0; i < esRequest.body.length; i++) { - const item = esRequest.body[i]; + for (let i = 0; i < esRequest.operations.length; i++) { + const item = esRequest.operations[i]; const action = Object.keys(item)[0]; if (actionNames.indexOf(action) !== -1) { @@ -3791,7 +3798,7 @@ function assertWellFormedRefresh(esRequest) { "storage", "invalid_argument", "refresh", - '"wait_for", false', + '"wait_for", false' ); } } diff --git a/lib/service/storage/esWrapper.js b/lib/service/storage/esWrapper.js index c4d85922f8..8b8b760cf8 100644 --- a/lib/service/storage/esWrapper.js +++ b/lib/service/storage/esWrapper.js @@ -135,7 +135,7 @@ const errorMessagesMapping = [ { // mapping set to strict, dynamic introduction of [lehuong] within [_doc] is not allowed regex: - /^mapping set to strict, dynamic introduction of \[(.+)\] within \[.+\] is not allowed/, + /^\[(.+)\] mapping set to strict, dynamic introduction of \[(.+)\] within \[.+\] is not allowed/, subcode: "strict_mapping_rejection", getPlaceholders: (esError, matches) => { // "/%26index.collection/_doc" @@ -145,7 +145,7 @@ const errorMessagesMapping = [ // keep only "collection" const collection = esPath.substr(esPath.indexOf(".") + 1).split("/")[0]; - return [matches[1], index, collection]; + return [matches[2], index, collection]; }, }, { diff --git a/lib/types/storage/Elasticsearch.ts b/lib/types/storage/Elasticsearch.ts index c58c38d0fe..f06f48f86f 100644 --- a/lib/types/storage/Elasticsearch.ts +++ b/lib/types/storage/Elasticsearch.ts @@ -9,6 +9,33 @@ export type InfoResult = { nodes?: estypes.ClusterStatsClusterNodes; }; +export type KUpdateResponse = { + _id: string; + _source: unknown; + _version: number; +} + +export type KStatsIndexesCollection = { + documentCount: number; + name: string; + size: estypes.ByteSize; +}; + +export type KStatsIndex = { + collections: KStatsIndexesCollection[]; + name: string; + size: estypes.ByteSize; +}; + +export type KStatsIndexes = { + [key: string]: KStatsIndex; +}; + +export type KStats = { + indexes: KStatsIndex[]; + size: estypes.ByteSize; +}; + export type KRequestBody = T & { _kuzzle_info?: { author: string; diff --git a/test/service/storage/elasticsearch.test.js b/test/service/storage/elasticsearch.test.js index 8f80c8a68e..af3d9f78ee 100644 --- a/test/service/storage/elasticsearch.test.js +++ b/test/service/storage/elasticsearch.test.js @@ -1027,7 +1027,7 @@ describe("Test: ElasticSearch service", () => { }, id: "liia", refresh: "wait_for", - _source: "true", + _source: true, retry_on_conflict: 42, }); @@ -1077,7 +1077,7 @@ describe("Test: ElasticSearch service", () => { }, id: "liia", refresh: "wait_for", - _source: "true", + _source: true, retry_on_conflict: elasticsearch.config.defaults.onUpdateConflictRetries, }); @@ -1253,7 +1253,7 @@ describe("Test: ElasticSearch service", () => { }, id: "liia", refresh: "wait_for", - _source: "true", + _source: true, retry_on_conflict: 42, }); @@ -1305,7 +1305,7 @@ describe("Test: ElasticSearch service", () => { }, id: "liia", refresh: "wait_for", - _source: "true", + _source: 
true, retry_on_conflict: elasticsearch.config.defaults.onUpdateConflictRetries, }); From e12c35af6b3a8d375005177cdf56509396db5cb4 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 13 Feb 2024 19:47:46 +0100 Subject: [PATCH 11/59] feat(es8): support both es7 and es8 --- .eslintignore | 2 +- .gitignore | 2 +- .kuzzlerc.sample.jsonc | 9 + bin/.lib/colorOutput.js | 69 - bin/.upgrades/connectors/es.js | 102 - bin/.upgrades/connectors/redis.js | 110 - bin/.upgrades/lib/connectorContext.js | 38 - bin/.upgrades/lib/context.js | 143 - bin/.upgrades/lib/formatters.js | 103 - bin/.upgrades/lib/inquirerExtended.js | 51 - bin/.upgrades/lib/logger.js | 98 - bin/.upgrades/lib/progressBar.js | 68 - .../versions/v1/checkConfiguration.js | 81 - bin/.upgrades/versions/v1/index.js | 35 - bin/.upgrades/versions/v1/upgradeCache.js | 145 - bin/.upgrades/versions/v1/upgradeStorage.js | 468 -- bin/upgrade | 86 - docker/scripts/start-kuzzle-test.ts | 4 +- lib/config/default.config.ts | 1 + lib/core/backend/backendStorage.ts | 38 +- lib/core/plugin/pluginContext.ts | 39 +- lib/core/storage/clientAdapter.js | 7 +- lib/core/storage/storageEngine.js | 4 +- lib/kuzzle/kuzzle.ts | 2 +- lib/service/storage/7/elasticsearch.js | 2926 +++++++++++++ lib/service/storage/7/elasticsearch.ts | 3844 ++++++++++++++++ lib/service/storage/7/esWrapper.js | 303 ++ lib/service/storage/8/elasticsearch.js | 2922 +++++++++++++ lib/service/storage/8/elasticsearch.ts | 3844 ++++++++++++++++ lib/service/storage/{ => 8}/esWrapper.js | 10 +- .../storage/{ => commons}/queryTranslator.js | 2 +- lib/service/storage/elasticsearch.ts | 3866 +---------------- ...StorageEngineElasticsearchConfiguration.ts | 9 +- lib/types/storage/7/Elasticsearch.js | 3 + lib/types/storage/7/Elasticsearch.ts | 42 + lib/types/storage/8/Elasticsearch.js | 3 + lib/types/storage/{ => 8}/Elasticsearch.ts | 2 +- package-lock.json | 667 ++- package.json | 3 +- test/core/backend/BackendStorage.test.js | 5 - test/core/plugin/context/context.test.js | 79 +- tsconfig.json | 3 +- 42 files changed, 14484 insertions(+), 5754 deletions(-) delete mode 100644 bin/.lib/colorOutput.js delete mode 100644 bin/.upgrades/connectors/es.js delete mode 100644 bin/.upgrades/connectors/redis.js delete mode 100644 bin/.upgrades/lib/connectorContext.js delete mode 100644 bin/.upgrades/lib/context.js delete mode 100644 bin/.upgrades/lib/formatters.js delete mode 100644 bin/.upgrades/lib/inquirerExtended.js delete mode 100644 bin/.upgrades/lib/logger.js delete mode 100644 bin/.upgrades/lib/progressBar.js delete mode 100644 bin/.upgrades/versions/v1/checkConfiguration.js delete mode 100644 bin/.upgrades/versions/v1/index.js delete mode 100644 bin/.upgrades/versions/v1/upgradeCache.js delete mode 100644 bin/.upgrades/versions/v1/upgradeStorage.js delete mode 100755 bin/upgrade create mode 100644 lib/service/storage/7/elasticsearch.js create mode 100644 lib/service/storage/7/elasticsearch.ts create mode 100644 lib/service/storage/7/esWrapper.js create mode 100644 lib/service/storage/8/elasticsearch.js create mode 100644 lib/service/storage/8/elasticsearch.ts rename lib/service/storage/{ => 8}/esWrapper.js (97%) rename lib/service/storage/{ => commons}/queryTranslator.js (99%) create mode 100644 lib/types/storage/7/Elasticsearch.js create mode 100644 lib/types/storage/7/Elasticsearch.ts create mode 100644 lib/types/storage/8/Elasticsearch.js rename lib/types/storage/{ => 8}/Elasticsearch.ts (95%) diff --git a/.eslintignore b/.eslintignore index 3add994fac..4fed7e5c36 100644 --- a/.eslintignore +++ 
b/.eslintignore @@ -126,7 +126,7 @@ lib/util/dump-collection.js lib/api/controllers/debugController.js lib/api/openapi/components/security/index.js lib/config/documentEventAliases.js -lib/service/storage/elasticsearch.js +lib/service/storage/Elasticsearch.js lib/types/DebugModule.js lib/util/time.js lib/util/async.js diff --git a/.gitignore b/.gitignore index 0c09c0fcde..56b5bcb271 100644 --- a/.gitignore +++ b/.gitignore @@ -162,7 +162,7 @@ lib/model/security/profile.js lib/model/security/role.js lib/model/security/token.js lib/model/security/user.js -lib/service/storage/elasticsearch.js +lib/service/storage/Elasticsearch.js lib/types/ClientConnection.js lib/types/config/DumpConfiguration.js lib/types/config/HttpConfiguration.js diff --git a/.kuzzlerc.sample.jsonc b/.kuzzlerc.sample.jsonc index a979f9a243..9f3508e380 100644 --- a/.kuzzlerc.sample.jsonc +++ b/.kuzzlerc.sample.jsonc @@ -761,6 +761,14 @@ // [storageEngine] // The default storage layer is Elasticsearch and it is // currently the only storage layer we support. + // * majorVersion: + // The major version of the Elasticsearch server. + // This value is used to determine which Elasticsearch client to use. + // Currently, only Elasticsearch 7.x and 8.x are supported. + // * backend: + // The storage engine to use. Currently, only "elasticsearch" is supported. + // * aliases: + // List of aliases for the storageEngine configuration. // * client: // Elasticsearch constructor options. Use this field to specify your // Elasticsearch config options, this object is passed through to the @@ -779,6 +787,7 @@ // - "strict": Rejects document // See https://www.elastic.co/guide/en/elasticsearch/reference/7.4/dynamic-mapping.html "storageEngine": { + "majorVersion": "8", "backend": "elasticsearch", "aliases": [ "storageEngine" diff --git a/bin/.lib/colorOutput.js b/bin/.lib/colorOutput.js deleted file mode 100644 index 446576730c..0000000000 --- a/bin/.lib/colorOutput.js +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const clc = require("cli-color"); - -function noop(str) { - return str; -} - -class ColorOutput { - constructor(opts) { - // /!\ "opts" might be a string - const noColors = - typeof opts === "object" && opts.parent && opts.parent.noColors; - - this.format = { - error: noColors ? noop : clc.red, - warn: noColors ? noop : clc.yellow, - notice: noColors ? noop : clc.cyanBright, - ok: noColors ? noop : clc.green.bold, - question: noColors ? 
noop : clc.whiteBright, - }; - } - - /* eslint-disable no-console */ - error(str) { - console.error(this.format.error(str)); - } - - warn(str) { - console.warn(this.format.warn(str)); - } - - notice(str) { - console.log(this.format.notice(str)); - } - - question(str) { - console.log(this.format.question(str)); - } - - ok(str) { - console.log(this.format.ok(str)); - } - - /* eslint-enable no-console */ -} - -module.exports = ColorOutput; diff --git a/bin/.upgrades/connectors/es.js b/bin/.upgrades/connectors/es.js deleted file mode 100644 index 425f324764..0000000000 --- a/bin/.upgrades/connectors/es.js +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const { formatWithOptions } = require("util"); - -const { Client } = require("@elastic/elasticsearch"); -const validator = require("validator"); -const _ = require("lodash"); - -const ConnectorContext = require("../lib/connectorContext"); - -let promise = null; - -async function getEsClient(context) { - const currentConfiguration = _.get( - context.config, - "services.storageEngine.client", - ); - - if (!currentConfiguration) { - context.log.error("Missing Kuzzle configuration for Elasticsearch."); - context.log.error( - "Missing configuration value: services.storageEngine.client", - ); - context.log.error("Aborted."); - process.exit(1); - } - - context.log.notice("Current Elasticsearch configuration:"); - context.log.print( - formatWithOptions({ colors: false, depth: null }, currentConfiguration), - ); - - const answers = await context.inquire.prompt([ - { - choices: ["source", "target", "source and target"], - default: "target", - message: "For this migration, use this current instance as the data", - name: "current", - type: "list", - }, - { - default: "", - message: ({ current }) => - `Enter the URL for the ${ - current === "source" ? "target" : "source" - } instance:`, - name: "url", - type: "input", - validate: (url) => { - const opts = { - protocols: ["http", "https"], - require_port: true, - require_protocol: true, - require_tld: false, - require_valid_protocol: true, - }; - - return ( - validator.isURL(url, opts) || - "A valid URL must be provided. Example: http://:" - ); - }, - when: ({ current }) => current !== "source and target", - }, - ]); - - const current = new Client(currentConfiguration); - const next = answers.url ? new Client({ node: answers.url }) : current; - - return answers.current === "source" - ? 
new ConnectorContext(context, current, next) - : new ConnectorContext(context, next, current); -} - -module.exports = async (context) => { - if (promise === null) { - promise = getEsClient(context); - } - - return promise; -}; diff --git a/bin/.upgrades/connectors/redis.js b/bin/.upgrades/connectors/redis.js deleted file mode 100644 index 3829b20259..0000000000 --- a/bin/.upgrades/connectors/redis.js +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const { formatWithOptions } = require("util"), - IORedis = require("ioredis"), - ConnectorContext = require("../lib/connectorContext"), - _ = require("lodash"); - -let promise = null; - -async function getRedisClient(context) { - const currentConfiguration = _.get(context.config, "services.internalCache"); - - if (!currentConfiguration) { - context.log.error("Missing Kuzzle configuration for Redis."); - context.log.error("Missing configuration value: services.internalCache"); - context.log.error("Aborted."); - process.exit(1); - } - - context.log.notice("Current Redis configuration:"); - context.log.print( - formatWithOptions({ colors: false, depth: null }, currentConfiguration), - ); - - const current = await context.inquire.direct({ - choices: ["source", "target", "source and target"], - default: "target", - message: "For this migration, use this current instance as the data", - type: "list", - }); - - const remaining = current === "source" ? "target" : "source"; - let answers = null; - - if (current !== "source and target") { - answers = await context.inquire.prompt([ - { - default: "", - message: `${remaining} server name or IP:`, - name: "server", - type: "input", - validate: (name) => name.length > 0 || "Non-empty string expected", - }, - { - default: "", - message: `${remaining} server port:`, - name: "port", - type: "input", - validate: (name) => { - const i = Number.parseFloat(name); - - if (!Number.isNaN(i) && Number.isInteger(i) && i > 1 && i <= 65535) { - return true; - } - - return "Invalid port number"; - }, - }, - ]); - } - - const options = { enableReadyCheck: true, lazyConnect: true }, - client = currentConfiguration.nodes - ? new IORedis.Cluster(currentConfiguration, options) - : new IORedis(currentConfiguration.node, options); - - await client.connect(); - - let next; - - if (answers) { - next = new IORedis(answers.port, answers.server, options); - await next.connect(); - } else { - next = client; - } - - return current === "source" - ? 
new ConnectorContext(context, client, next) - : new ConnectorContext(context, next, client); -} - -module.exports = async (context) => { - if (promise === null) { - promise = getRedisClient(context); - } - - return promise; -}; diff --git a/bin/.upgrades/lib/connectorContext.js b/bin/.upgrades/lib/connectorContext.js deleted file mode 100644 index 515cd4ad4a..0000000000 --- a/bin/.upgrades/lib/connectorContext.js +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const UpgradeContext = require("./context"); - -class ConnectorContext extends UpgradeContext { - constructor(context, source, target) { - super(context); - this.source = source; - this.target = target; - } - - get inPlace() { - return this.source === this.target; - } -} - -module.exports = ConnectorContext; diff --git a/bin/.upgrades/lib/context.js b/bin/.upgrades/lib/context.js deleted file mode 100644 index 1f9e4fdc6b..0000000000 --- a/bin/.upgrades/lib/context.js +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -"use strict"; - -const fs = require("fs"); -const path = require("path"); - -const rc = require("rc"); - -const inquirer = require("./inquirerExtended"); -const Logger = require("./logger"); - -const defaultConfiguration = require("../../../lib/config/default.config"); -const { version: currentVersion } = require("../../../package.json"); - -class Version { - constructor() { - this.from = null; - this.list = []; - } -} - -class UpgradeContext { - constructor(args) { - // copy constructor - if (args instanceof UpgradeContext) { - this.config = args.config; - this.log = args.log; - this.inquire = args.inquire; - this.version = args.version; - this.argv = args.argv; - } else { - this.config = null; - this.log = new Logger(args); - this.inquire = inquirer; - this.version = null; - this.argv = args; - } - } - - async init() { - await this.loadConfiguration(); - - if (this.config.configs) { - this.log.ok("Configuration files loaded:"); - this.config.configs.forEach((f) => this.log.print(`\t- ${f}`)); - } - - this.version = await this.getVersions(); - } - - async loadConfiguration() { - let cfg; - - try { - cfg = rc("kuzzle", JSON.parse(JSON.stringify(defaultConfiguration))); - this.config = cfg; - return; - } catch (e) { - this.log.error(`Cannot load configuration files: ${e.message}`); - if (this.config === null) { - this.log.error( - "Check your configuration files, and restart the upgrade script.", - ); - process.exit(1); - } - } - - // If we are here, this means that an error was thrown, due to a change made - // to configuration files *during* the upgrade (probably because a version - // upgrade asked the user to modify their configuration files manually) - // To prevent aborting unnecessarily during the upgrade process, we ask the - // user to fix the situation - const retry = await this.inquire.direct({ - default: true, - message: "Retry?", - type: "confirm", - }); - - if (!retry) { - this.log.error("Aborted by user action."); - process.exit(1); - } - - await this.loadConfiguration(); - } - - /** - * Asks the user the source version to upgrade from - * @returns {Version} - */ - async getVersions() { - const version = new Version(); - - this.log.print(`Current Kuzzle version: ${currentVersion}`); - - version.list = fs - .readdirSync(path.resolve(`${__dirname}/../versions`), { - withFileTypes: true, - }) - .filter((entry) => entry.isDirectory() && entry.name.match(/^v\d+$/)) - .map((entry) => entry.name) - .sort( - (a, b) => parseInt(a[0].substring(1)) - parseInt(b[0].substring(1)), - ); - - if (version.list.length === 1) { - version.from = version.list[0]; - } else { - version.from = await inquirer.direct({ - choices: version.list, - default: version.list[version.list.length - 1], - message: "Migrate from which version", - type: "list", - }); - - version.list = version.list.slice(version.list.indexOf(version.from)); - } - - return version; - } -} - -module.exports = UpgradeContext; diff --git a/bin/.upgrades/lib/formatters.js b/bin/.upgrades/lib/formatters.js deleted file mode 100644 index ae360ef01f..0000000000 --- a/bin/.upgrades/lib/formatters.js +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const clc = require("cli-color"); - -class RawFormatter { - raw(msg) { - return msg; - } -} - -class ColoredFormatter extends RawFormatter { - error(msg) { - return clc.red(msg); - } - - warn(msg) { - return clc.yellow(msg); - } - - notice(msg) { - return clc.cyan(msg); - } - - ok(msg) { - return clc.green(msg); - } - - question(msg) { - return clc.whiteBright(msg); - } -} - -class PrefixedFormatter extends RawFormatter { - error(msg) { - return `[ERROR] ${msg}`; - } - - warn(msg) { - return `[WARN] ${msg}`; - } - - notice(msg) { - return `[i] ${msg}`; - } - - ok(msg) { - return `[OK] ${msg}`; - } - - question(msg) { - return `[?] ${msg}`; - } -} - -class FileFormatter extends PrefixedFormatter { - error(msg) { - return Buffer.from(`[${new Date().toISOString()}]${super.error(msg)}\n`); - } - - warn(msg) { - return Buffer.from(`[${new Date().toISOString()}]${super.warn(msg)}\n`); - } - - notice(msg) { - return Buffer.from(`[${new Date().toISOString()}]${super.notice(msg)}\n`); - } - - ok(msg) { - return Buffer.from(`[${new Date().toISOString()}]${super.ok(msg)}\n`); - } - - question(msg) { - return Buffer.from(`[${new Date().toISOString()}]${super.question(msg)}\n`); - } - - // @override - raw(msg) { - return Buffer.from(`${msg}\n`); - } -} - -module.exports = { ColoredFormatter, FileFormatter, PrefixedFormatter }; diff --git a/bin/.upgrades/lib/inquirerExtended.js b/bin/.upgrades/lib/inquirerExtended.js deleted file mode 100644 index 65ff08c368..0000000000 --- a/bin/.upgrades/lib/inquirerExtended.js +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -// Returns an instance of "inquirer" extended with a "direct" function that -// returns the answer directly, instead of a key-value map of answers. -// Useful only because we often prompt questions one by one, with tasks in the -// middle, and this "direct" function helps reducing the clutter. 
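A usage sketch of the behaviour described in the comment above (the question text and the `pageSize` name are made up for illustration; `inquire` stands for the extended instance this module exports):

async function askPageSize(inquire) {
  // Standard inquirer: answers come back as a map keyed by question name.
  const { pageSize } = await inquire.prompt({
    default: 100,
    message: "Storage page size:",
    name: "pageSize",
    type: "input",
  });

  // Extended form: the answer value is returned directly, no "name" needed.
  const pageSizeDirect = await inquire.direct({
    default: 100,
    message: "Storage page size:",
    type: "input",
  });

  return { pageSize, pageSizeDirect };
}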
- -const assert = require("assert").strict, - inquirer = require("inquirer"), - _ = require("lodash"); - -inquirer.direct = async function direct(prompt) { - assert( - _.isPlainObject(prompt), - "Invalid argument: expected a non-empty object", - ); - assert( - typeof prompt.name === "undefined", - 'Unexpected "name" argument: if you need to set a name, use inquirer.prompt', - ); - - const p = _.cloneDeep(prompt); - p.name = "foo"; - - const { foo } = await inquirer.prompt(p); - - return foo; -}; - -module.exports = inquirer; diff --git a/bin/.upgrades/lib/logger.js b/bin/.upgrades/lib/logger.js deleted file mode 100644 index 08892989cb..0000000000 --- a/bin/.upgrades/lib/logger.js +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const fs = require("fs"), - { - ColoredFormatter, - PrefixedFormatter, - FileFormatter, - } = require("./formatters"); - -class ColorOutput { - constructor(opts) { - this.terminalFormatter = opts.C - ? new PrefixedFormatter() - : new ColoredFormatter(); - - this.fileFormatter = new FileFormatter(); - - this.fileReport = null; - - if (!opts.R) { - this.notice(`Upgrade report file: ${opts.output}`); - this.fileReport = fs.openSync(opts.output, "w", 0o600); - } - } - - /* eslint-disable no-console */ - error(str) { - console.error(this.terminalFormatter.error(str)); - - if (this.fileReport) { - fs.writeSync(this.fileReport, this.fileFormatter.error(str)); - } - } - - warn(str) { - console.warn(this.terminalFormatter.warn(str)); - - if (this.fileReport) { - fs.writeSync(this.fileReport, this.fileFormatter.warn(str)); - } - } - - notice(str) { - console.log(this.terminalFormatter.notice(str)); - - if (this.fileReport) { - fs.writeSync(this.fileReport, this.fileFormatter.notice(str)); - } - } - - question(str) { - console.log(this.terminalFormatter.question(str)); - - if (this.fileReport) { - fs.writeSync(this.fileReport, this.fileFormatter.question(str)); - } - } - - ok(str) { - console.log(this.terminalFormatter.ok(str)); - - if (this.fileReport) { - fs.writeSync(this.fileReport, this.fileFormatter.ok(str)); - } - } - - print(str) { - console.log(this.terminalFormatter.raw(str)); - - if (this.fileReport) { - fs.writeSync(this.fileReport, this.fileFormatter.raw(str)); - } - } - /* eslint-enable no-console */ -} - -module.exports = ColorOutput; diff --git a/bin/.upgrades/lib/progressBar.js b/bin/.upgrades/lib/progressBar.js deleted file mode 100644 index b53b0ed1ef..0000000000 --- a/bin/.upgrades/lib/progressBar.js +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use 
this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const moment = require("moment"); - -// Simple progress bar making the wait for long tasks more bearable -class ProgressBar { - constructor(context, text, total, barSize = 20) { - this.text = text; - this.total = total; - this.barSize = barSize; - this.bar = new context.inquire.ui.BottomBar(); - this.update(0); - this.start = Date.now(); - } - - destroy() { - this.bar.updateBottomBar(""); - this.bar.close(); - } - - update(count) { - const remaining = this._getRemainingTime(count), - str = `${this.text} -${this._getBar(count)}(remaining: ${remaining}) ${count} / ${this.total}`; - - this.bar.updateBottomBar(str); - } - - _getRemainingTime(count) { - const elapsed = Date.now() - this.start, - remaining = - count > 0 ? Math.round((this.total * elapsed) / count) - elapsed : 0; - - return moment(remaining).format("mm:ss"); - } - - _getBar(count) { - const percent = (count * 100) / this.total, - progress = Math.round((percent * this.barSize) / 100); - - return ( - "[" + "#".repeat(progress) + "-".repeat(this.barSize - progress) + "]" - ); - } -} - -module.exports = ProgressBar; diff --git a/bin/.upgrades/versions/v1/checkConfiguration.js b/bin/.upgrades/versions/v1/checkConfiguration.js deleted file mode 100644 index 6b2c93c393..0000000000 --- a/bin/.upgrades/versions/v1/checkConfiguration.js +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const _ = require("lodash"); - -module.exports = async function check(context) { - let action = false; - const warn = (msg) => context.log.warn(`[CONFIG. 
FILES] ${msg}`); - const renamed = { - "services.db": "services.storageEngine", - "services.internalEngine": "services.internalIndex", - "services.storageEngine.commonMapping._kuzzle_info": - "services.storageEngine.commonMapping.properties._kuzzle_info", - "services.storageEngine.dynamic": - "services.storageEngine.commonMapping.dynamic", - }; - const deprecated = [ - "server.entryPoints", - "server.protocols.socketio", - "server.proxy", - "services.garbageCollector", - "services.storageEngine.client.apiVersion", - "services.storageEngine.commonMapping.properties._kuzzle_info.deletedAt", - "services.storageEngine.commonMapping.properties._kuzzle_info.active", - ]; - - for (const [oldName, newName] of Object.entries(renamed)) { - if (_.get(context.config, oldName)) { - action = true; - warn(`The configuration key "${oldName}" is now named "${newName}"`); - } - } - - for (const name of deprecated) { - if (_.get(context.config, name)) { - action = true; - warn(`The configuration key "${name}" is obsolete and should be removed`); - } - } - - if (action) { - const choices = ["Check again", "Abort", "Ignore (not recommended)"], - proceed = await context.inquire.direct({ - choices, - default: choices[0], - message: "Configuration files need to be updated:", - type: "list", - }); - - if (proceed === choices[0]) { - await context.loadConfiguration(); - return check(context); - } - - if (proceed === choices[1]) { - process.exit(1); - } - } - - context.log.ok("Configuration files checked: OK"); -}; diff --git a/bin/.upgrades/versions/v1/index.js b/bin/.upgrades/versions/v1/index.js deleted file mode 100644 index 47350723b3..0000000000 --- a/bin/.upgrades/versions/v1/index.js +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const checkConfiguration = require("./checkConfiguration"); -const upgradeStorage = require("./upgradeStorage"); -const upgradeCache = require("./upgradeCache"); - -module.exports = async function upgrade(context) { - context.log.notice("\n\n=== CONFIGURATION FILES ==="); - await checkConfiguration(context); - context.log.notice("\n\n=== STORAGE ==="); - await upgradeStorage(context); - context.log.notice("\n\n=== CACHE ==="); - await upgradeCache(context); -}; diff --git a/bin/.upgrades/versions/v1/upgradeCache.js b/bin/.upgrades/versions/v1/upgradeCache.js deleted file mode 100644 index 30f45d6ed8..0000000000 --- a/bin/.upgrades/versions/v1/upgradeCache.js +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const getRedisConnector = require("../../connectors/redis"); - -async function copyKey(context, key) { - const dump = await context.source.dumpBuffer(key), - ttl = Math.max(0, await context.source.pttl(key)); - - // Breaking change from v1 to v2, due to how indexes are handled: - // token keys are prefixed "repos/%kuzzle/token" in v1, and - // "repos/kuzzle/token" in v2 - const newKey = key.replace("repos/%kuzzle/token", "repos/kuzzle/token"); - - await context.target.restore(newKey, ttl, dump, "REPLACE"); -} - -async function getSourceKeys(context, pattern) { - if (!context.source.nodes) { - return context.source.keys(pattern); - } - - const keys = []; - - for (const node of context.source.nodes("master")) { - keys.push(...(await node.source.keys(pattern))); - } - - return keys; -} - -async function copyDatabase(context, db) { - await context.source.select(db); - await context.target.select(db); - - await context.target.flushdb(); - - const keys = await getSourceKeys(context, "*"); - - for (const key of keys) { - await copyKey(context, key); - } - - context.log.ok(`Imported cache keys from database ${db}`); -} - -async function inPlaceMigration(context) { - context.log.notice(` -In-place migration detected: this script will make the changes necessary to -make the cache data compatible with Kuzzle v2.`); - - const choices = { - abort: "Abort", - copy: "Copy to new keys (obsolete keys will be delete once expired)", - move: "Move keys (cannot be undone, cache won't work with Kuzzle v1 anymore)", - }, - action = await context.inquire.direct({ - choices: Object.values(choices), - default: choices.copy, - message: "Select how the database should be migrated:", - type: "list", - }); - - if (action === choices.abort) { - context.log.error("Aborted by user."); - process.exit(0); - } - - const db = context.config.services.internalCache.database || 0; - - await context.source.select(db); - - const keys = await getSourceKeys(context, "repos/*"); - - for (const key of keys) { - await copyKey(context, key); - - if (action === choices.move) { - await context.source.del(key); - } - } -} - -async function upgradeToTarget(context) { - context.log.notice(` -This script will WIPE TARGET DATABASES from the target cache instance. 
-Then, it will COPY all data from the source cache instance, without altering it -in any way.`); - - const confirm = await context.inquire.direct({ - default: true, - message: "Continue?", - type: "confirm", - }); - - if (!confirm) { - context.log.error("Aborted by user."); - process.exit(0); - } - - for (const cachedb of ["internalCache", "memoryStorage"]) { - const config = context.config.services[cachedb]; - - await copyDatabase(context, config.database || 0); - } -} - -module.exports = async function upgradeCache(context) { - const cacheContext = await getRedisConnector(context); - - try { - if (cacheContext.inPlace) { - await inPlaceMigration(cacheContext); - } else { - await upgradeToTarget(cacheContext); - } - - cacheContext.log.ok("Cache import complete."); - } catch (e) { - cacheContext.log.error(`Cache import failure: ${e.message}`); - cacheContext.log.print(e.stack); - cacheContext.log.error("Aborted."); - process.exit(1); - } -}; diff --git a/bin/.upgrades/versions/v1/upgradeStorage.js b/bin/.upgrades/versions/v1/upgradeStorage.js deleted file mode 100644 index 391e69d8c8..0000000000 --- a/bin/.upgrades/versions/v1/upgradeStorage.js +++ /dev/null @@ -1,468 +0,0 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -const _ = require("lodash"); - -const getESConnector = require("../../connectors/es"); -const ProgressBar = require("../../lib/progressBar"); - -const INTERNAL_PREFIX = "%"; -const PUBLIC_PREFIX = "&"; -const NAME_SEPARATOR = "."; - -function transformProfile(profile) { - if (!Array.isArray(profile.policies)) { - return profile; - } - - for (const policy of profile.policies.filter((p) => p.restrictedTo)) { - for (const restriction of policy.restrictedTo.filter( - (r) => r.collections, - )) { - restriction.collections = restriction.collections.map((c) => - c.toLowerCase(), - ); - } - } - return profile; -} - -function getNewIndexName(index, collection) { - const prefix = index[0] === "%" ? 
"" : PUBLIC_PREFIX; - - return `${prefix}${index}${NAME_SEPARATOR}${collection}`; -} - -function fixIndexName(context, index, collection, newIndex) { - const lowercased = newIndex.toLowerCase(); - - if (lowercased !== newIndex) { - // uppercase letters were already forbidden in index names - context.log.warn( - `Index "${index}": collection "${collection}" has been renamed to "${collection.toLowerCase()}"`, - ); - } - - return lowercased; -} - -async function moveData(context, index, collection, newIndex, transform) { - let page = await context.source.search({ - body: { sort: ["_doc"] }, - index, - scroll: "1m", - size: context.argv.storagePageSize, - type: collection, - }); - - const total = page.body.hits.total; - const progressBar = new ProgressBar( - context, - `Importing: ${index}/${collection}`, - total, - ); - let moved = 0; - - while (moved < total) { - const bulk = []; - - for (let i = 0; i < page.body.hits.hits.length; i++) { - const doc = page.body.hits.hits[i]; - - if (doc._source._kuzzle_info) { - delete doc._source._kuzzle_info.active; - delete doc._source._kuzzle_info.deletedAt; - } - - if (transform) { - doc._source = transform(doc._source); - } - - bulk.push({ - create: { - _id: doc._id, - _index: newIndex, - _type: context._type, - }, - }); - bulk.push(doc._source); - } - - await context.target.bulk({ _source: false, body: bulk }); - - moved += page.body.hits.hits.length; - - progressBar.update(moved); - - if (moved < total) { - page = await context.source.scroll({ - scroll: "1m", - scroll_id: page.body._scroll_id, - }); - } - } - - progressBar.destroy(); - return total; -} - -async function upgradeMappings(context, index, collection, newIndex) { - const mappingsResponse = await context.source.indices.getMapping({ - index, - type: collection, - }); - const mappings = mappingsResponse.body[index].mappings[collection]; - - // replace obsolete mapping properties - if (mappings.properties && mappings.properties._kuzzle_info) { - mappings.properties._kuzzle_info = - context.config.services.storageEngine.commonMapping.properties._kuzzle_info; - } - - await context.target.indices.putMapping({ - body: { - _meta: mappings._meta, - dynamic: mappings.dynamic || false, - properties: mappings.properties, - }, - index: newIndex, - type: context._type, - }); -} - -async function createNewIndex(context, newIndex) { - const exists = await context.target.indices.exists({ index: newIndex }); - - if (exists.body) { - await context.target.indices.delete({ index: newIndex }); - } - - await context.target.indices.create({ index: newIndex }); -} - -async function upgrade(context, index, collection, newIndex) { - const fixedIndexName = fixIndexName(context, index, collection, newIndex); - - await createNewIndex(context, fixedIndexName); - await upgradeMappings(context, index, collection, fixedIndexName); - - return moveData(context, index, collection, fixedIndexName); -} - -async function upgradeInternalStorage(context) { - const config = context.config.services.storageEngine.internalIndex; - const index = `${INTERNAL_PREFIX}${config.name}`; - const mapconfig = config.collections; - const collections = { - config: mapconfig.config, - profiles: mapconfig.profiles, - roles: mapconfig.roles, - users: null, - validations: mapconfig.validations, - }; - - for (const [collection, mappings] of Object.entries(collections)) { - const newIndex = getNewIndexName(index, collection); - let total; - - if (mappings) { - await createNewIndex(context, newIndex); - await context.target.indices.putMapping({ - body: 
mappings, - index: newIndex, - type: context._type, - }); - - total = await moveData( - context, - index, - collection, - newIndex, - collection === "profiles" && transformProfile, - ); - } else { - total = await upgrade(context, index, collection, newIndex); - } - - context.log.ok( - `... migrated internal data: ${collection} (${total} documents)`, - ); - } - - // bootstrap document - await context.target.create({ - body: { version: "2.0.0" }, - id: "internalIndex.dataModelVersion", - index: `${index}.config`, - type: context._type, - }); - - await context.target.create({ - body: { timestamp: Date.now() }, - id: `${config.name}.done`, - index: `${index}.config`, - type: context._type, - }); -} - -async function upgradePluginsStorage(context) { - const { body } = await context.source.cat.indices({ format: "json" }); - const indexes = body - .map((b) => b.index) - .filter((n) => n.startsWith("%plugin:")); - - for (const index of indexes) { - const plugin = index.split(":")[1]; - const newIndexBase = `%plugin-${plugin}${NAME_SEPARATOR}`; - const mappings = await context.source.indices.getMapping({ index }); - const collections = Object.keys(mappings.body[index].mappings); - - for (const collection of collections) { - const newIndex = newIndexBase + collection; - const total = await upgrade(context, index, collection, newIndex); - - context.log.ok( - `... migrated storage for plugin ${plugin}: ${collection} (${total} documents)`, - ); - } - } -} - -async function upgradeAliases(context, upgraded) { - const response = await context.source.indices.getAlias({ - index: Object.keys(upgraded), - }); - - const aliases = {}; - for (const [index, obj] of Object.entries(response.body)) { - if (Object.keys(obj.aliases).length > 0) { - for (const newIndex of upgraded[index].targets) { - aliases[newIndex] = obj.aliases; - } - } - } - - if (Object.keys(aliases).length === 0) { - return; - } - - context.log.notice(` -Index aliases detected. This script can import them to the new structure, but -due to the removal of native collections in Elasticsearch, future aliases will -be duplicated across all of an index upgraded collections.`); - - const choice = await context.inquire.direct({ - default: false, - message: "Upgrade aliases?", - type: "confirm", - }); - - if (!choice) { - return; - } - - for (const [index, obj] of Object.entries(aliases)) { - for (const [name, body] of Object.entries(obj)) { - await context.target.indices.putAlias({ - _type: context._type, - body, - index, - name, - }); - context.log.ok(`...... 
alias ${name} on index ${index} upgraded`); - } - } -} - -async function upgradeDataStorage(context) { - const { body } = await context.source.cat.indices({ format: "json" }); - const upgraded = {}; - let indexes = body - .map((b) => b.index) - .filter((n) => !n.startsWith(INTERNAL_PREFIX)); - - context.log.notice( - `There are ${indexes.length} data indexes that can be upgraded`, - ); - const choices = { - all: "upgrade all indexes", - askCollection: "choose which collections can be upgraded", - askIndex: "choose which indexes can be upgraded", - skip: "skip all data index upgrades", - }; - - const action = await context.inquire.direct({ - choices: Object.values(choices), - default: choices.all, - message: "You want to", - type: "list", - }); - - if (action === choices.skip) { - return; - } - - if (action === choices.askIndex) { - indexes = await context.inquire.direct({ - choices: indexes.map((i) => ({ checked: true, name: i })), - message: "Select the indexes to upgrade:", - type: "checkbox", - }); - } - - for (const index of indexes) { - const mappings = await context.source.indices.getMapping({ index }); - const allCollections = Object.keys(mappings.body[index].mappings); - let collections = allCollections; - - if (action === choices.askCollection) { - context.log.notice(`Starting to upgrade the index ${index}`); - collections = await context.inquire.direct({ - choices: collections.map((c) => ({ checked: true, name: c })), - message: "Select the collections to upgrade:", - type: "checkbox", - }); - } - - upgraded[index] = { - canBeRemoved: collections.length === allCollections.length, - targets: [], - }; - - for (const collection of collections) { - const newIndex = getNewIndexName(index, collection); - const total = await upgrade(context, index, collection, newIndex); - - upgraded[index].targets.push(newIndex); - context.log.ok( - `... migrated data index ${index}: ${collection} (${total} documents)`, - ); - } - } - - await upgradeAliases(context, upgraded); - - return upgraded; -} - -async function destroyPreviousStructure(context, upgraded) { - // there is no point in destroying the previous structure if not performing - // an in-place migration - if (!context.inPlace) { - return; - } - - const { body } = await context.source.cat.indices({ format: "json" }); - const plugins = body - .map((b) => b.index) - .filter((n) => n.startsWith("%plugin:")); - - let indexes = [ - "%kuzzle", - ...plugins, - ...Object.keys(upgraded).filter((i) => upgraded[i].canBeRemoved), - ]; - - context.log.notice( - "Since this is an in-place migration, the previous structure can be removed.", - ); - context.log.notice( - "(only data indexes with ALL their collections upgraded can be deleted)", - ); - - const choices = { - everything: "Yes - remove all upgraded structures", - internal: "Remove only Kuzzle internal data", - kuzzleAndPlugins: "Remove Kuzzle internal data and plugins storages", - no: "No - keep everything as is", - }; - - const action = await context.inquire.direct({ - choices: Object.values(choices), - default: choices[0], - message: "Destroy? 
(THIS CANNOT BE REVERTED)", - type: "list", - }); - - if (action === choices.no) { - context.log.ok("Previous structure left intact."); - return; - } - - if (action === choices.kuzzleAndPlugins) { - indexes = ["%kuzzle", ...plugins]; - } else if (action === choices.internal) { - indexes = ["%kuzzle"]; - } - - await context.source.indices.delete({ index: indexes }); - context.log.ok("Previous structure destroyed."); -} - -module.exports = async function upgradeStorage(context) { - const storageContext = await getESConnector(context); - const targetInfo = await storageContext.target.info(); - const targetMajor = targetInfo.body.version.number.split(".")[0]; - - storageContext._type = - storageContext.inPlace && targetMajor === "5" ? "default" : undefined; - - context.log.notice(` -This script will now start *COPYING* the existing data to the target storage -space. -If the upgrade is interrupted, this script can be replayed any number of times. -Existing data from the older version of Kuzzle will be unaffected, but if -Kuzzle indexes already exist in the target storage space, they will be -overwritten without notice.`); - - const confirm = await context.inquire.direct({ - default: true, - message: "Continue?", - type: "confirm", - }); - - if (!confirm) { - context.log.error("Aborted by user."); - process.exit(0); - } - - try { - await upgradeInternalStorage(storageContext); - await upgradePluginsStorage(storageContext); - const upgraded = await upgradeDataStorage(storageContext); - - storageContext.log.ok("Storage migration complete."); - await destroyPreviousStructure(storageContext, upgraded); - } catch (e) { - storageContext.log.error(`Storage upgrade failure: ${e.message}`); - - const reason = _.get(e, "meta.body.error.reason"); - if (reason) { - storageContext.log.error(`Reason: ${reason}`); - } - - storageContext.log.print(e.stack); - storageContext.log.error("Aborted."); - process.exit(1); - } -}; diff --git a/bin/upgrade b/bin/upgrade deleted file mode 100755 index 214914f47a..0000000000 --- a/bin/upgrade +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env node - -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -'use strict'; - -const yargs = require('yargs'); -const Context = require('./.upgrades/lib/context'); - -const args = yargs - .usage('USAGE: $0 [OPTIONS]') - .options({ - C: { - default: false, - describe: 'Disable colored messages in the terminal', - group: 'General', - type: 'boolean', - }, - R: { - default: false, - describe: 'Disable upgrades report in a log file', - group: 'General', - type: 'boolean', - }, - output: { - alias: 'o', - default: `upgrade-${Date.now()}.report`, - describe: 'Upgrade logs file name', - group: 'General', - type: 'string', - }, - 'storage-page-size': { - default: 50, - describe: 'Size of a storage page when moving data around', - group: 'Advanced', - type: 'number', - }, - }) - .strict(true) - .parse(); - -const context = new Context(args); - -process.removeAllListeners('unhandledRejection'); -process.on('unhandledRejection', reason => { - if (reason !== undefined) { - context.log.error(reason.message); - context.log.error(reason.stack); - } - else { - context.log.error('FATAL: unexpected unhandled promise rejection'); - } - - process.exit(1); -}); - -(async () => { - await context.init(); - - for (const dir of context.version.list) { - context.log.print(`Upgrading from Kuzzle ${dir}...`); - - await require(`./.upgrades/versions/${dir}`)(context); - } - - process.exit(0); -})(); diff --git a/docker/scripts/start-kuzzle-test.ts b/docker/scripts/start-kuzzle-test.ts index 12cf6e6662..3eb5fac8c6 100644 --- a/docker/scripts/start-kuzzle-test.ts +++ b/docker/scripts/start-kuzzle-test.ts @@ -356,8 +356,8 @@ app.errors.register("app", "api", "custom", { app.hook.register( "generic:document:afterUpdate", - async (documents, request: KuzzleRequest) => { - await app.sdk.document.createOrReplace( + (documents, request: KuzzleRequest): void => { + app.sdk.document.createOrReplace( request.getIndex(), request.getCollection(), "generic:document:afterUpdate", diff --git a/lib/config/default.config.ts b/lib/config/default.config.ts index 2773572f91..557734e2d1 100644 --- a/lib/config/default.config.ts +++ b/lib/config/default.config.ts @@ -284,6 +284,7 @@ const defaultConfig: KuzzleConfiguration = { bootstrapLockTimeout: 60000, }, storageEngine: { + majorVersion: 7, aliases: ["storageEngine"], backend: "elasticsearch", client: { diff --git a/lib/core/backend/backendStorage.ts b/lib/core/backend/backendStorage.ts index 649876bc92..5036dcba70 100644 --- a/lib/core/backend/backendStorage.ts +++ b/lib/core/backend/backendStorage.ts @@ -19,19 +19,13 @@ * limitations under the License. */ -import { Client } from "@elastic/elasticsearch"; - -import Elasticsearch from "../../service/storage/elasticsearch"; +import { Elasticsearch } from "../../service/storage/Elasticsearch"; import { JSONObject } from "../../../index"; -import { ApplicationManager, Backend } from "./index"; +import { ApplicationManager } from "./index"; export class BackendStorage extends ApplicationManager { - private _client: Client = null; - private _Client: new (clientConfig?: any) => Client = null; - - constructor(application: Backend) { - super(application); - } + private _client: any = null; + private _Client: new (clientConfig?: any) => any = null; /** * Storage client constructor. 
@@ -39,16 +33,11 @@ export class BackendStorage extends ApplicationManager { * * @param clientConfig Overload configuration for the underlaying storage client */ - get StorageClient(): new (clientConfig?: any) => Client { + get StorageClient(): new (clientConfig?: any) => any { if (!this._Client) { - const kuzzle = this._kuzzle; - this._Client = function ESClient(clientConfig: JSONObject = {}) { - return Elasticsearch.buildClient({ - ...kuzzle.config.services.storageEngine.client, - ...clientConfig, - }); - } as any; + return this.getElasticsearchClient(clientConfig); + } as unknown as new (clientConfig?: any) => any; } return this._Client; @@ -58,13 +47,18 @@ export class BackendStorage extends ApplicationManager { * Access to the underlaying storage engine client. * (Currently Elasticsearch) */ - get storageClient(): Client { + get storageClient(): any { if (!this._client) { - this._client = Elasticsearch.buildClient( - this._kuzzle.config.services.storageEngine.client, - ); + this._client = this.getElasticsearchClient(); } return this._client; } + + getElasticsearchClient(clientConfig?: JSONObject): any { + return Elasticsearch.buildClient( + { ...this._kuzzle.config.services.storageEngine.client, ...clientConfig }, + this._kuzzle.config.services.storageEngine.majorVersion + ); + } } diff --git a/lib/core/plugin/pluginContext.ts b/lib/core/plugin/pluginContext.ts index a14e4b7dca..ad0a883e37 100644 --- a/lib/core/plugin/pluginContext.ts +++ b/lib/core/plugin/pluginContext.ts @@ -21,13 +21,12 @@ import Bluebird from "bluebird"; import { Koncorde } from "../shared/KoncordeWrapper"; -import { Client } from "@elastic/elasticsearch"; import { JSONObject } from "kuzzle-sdk"; import { EmbeddedSDK } from "../shared/sdk/embeddedSdk"; import PluginRepository from "./pluginRepository"; import Store from "../shared/store"; -import Elasticsearch from "../../service/storage/elasticsearch"; +import { Elasticsearch } from "../../service/storage/Elasticsearch"; import { isPlainObject } from "../../util/safeObject"; import Promback from "../../util/promback"; import { Mutex } from "../../util/mutex"; @@ -130,7 +129,7 @@ export class PluginContext { connectionId: string, index: string, collection: string, - filters: JSONObject, + filters: JSONObject ) => Promise<{ roomId: string }>; /** @@ -139,7 +138,7 @@ export class PluginContext { unregister: ( connectionId: string, roomId: string, - notify: boolean, + notify: boolean ) => Promise; }; @@ -205,7 +204,7 @@ export class PluginContext { /** * Constructor for Elasticsearch SDK Client */ - ESClient: typeof Client; + ESClient: new () => any; }; /** @@ -283,7 +282,7 @@ export class PluginContext { // eslint-disable-next-line no-inner-declarations function PluginContextRepository( collection: string, - ObjectConstructor: any = null, + ObjectConstructor: any = null ) { if (!collection) { throw contextError.get("missing_collection"); @@ -305,21 +304,20 @@ export class PluginContext { } as Repository; } - // eslint-disable-next-line no-inner-declarations - function PluginContextESClient(): Client { + function PluginContextESClient(): any { return Elasticsearch.buildClient( - global.kuzzle.config.services.storageEngine.client, + global.kuzzle.config.services.storageEngine.client ); } this.constructors = { BaseValidationType: require("../validation/baseType"), - ESClient: PluginContextESClient as unknown as new () => Client, + ESClient: PluginContextESClient as any, Koncorde: Koncorde as any, Mutex: Mutex, Repository: PluginContextRepository as unknown as new ( 
collection: string, - objectConstructor: any, + objectConstructor: any ) => Repository, Request: instantiateRequest as any, RequestContext: RequestContext as any, @@ -369,7 +367,7 @@ export class PluginContext { }, { connectionId: connectionId, - }, + } ); return global.kuzzle.ask("core:realtime:subscribe", request); }, @@ -378,17 +376,17 @@ export class PluginContext { "core:realtime:unsubscribe", connectionId, roomId, - notify, + notify ), }, trigger: (eventName, payload) => global.kuzzle.pipe(`plugin-${pluginName}:${eventName}`, payload), validation: { addType: global.kuzzle.validation.addType.bind( - global.kuzzle.validation, + global.kuzzle.validation ), validate: global.kuzzle.validation.validate.bind( - global.kuzzle.validation, + global.kuzzle.validation ), }, }; @@ -423,7 +421,7 @@ function execute(request, callback) { ["subscribe", "unsubscribe"].includes(request.input.action) ) { return promback.reject( - contextError.get("unavailable_realtime", request.input.action), + contextError.get("unavailable_realtime", request.input.action) ); } @@ -497,11 +495,10 @@ function instantiateRequest(request, data, options = {}) { } if (_data) { - target.input.volatile = Object.assign( - {}, - _request.input.volatile, - _data.volatile, - ); + target.input.volatile = { + ..._request.input.volatile, + ..._data.volatile, + }; } else { target.input.volatile = _request.input.volatile; } diff --git a/lib/core/storage/clientAdapter.js b/lib/core/storage/clientAdapter.js index 4374cd94d9..39bb33d429 100644 --- a/lib/core/storage/clientAdapter.js +++ b/lib/core/storage/clientAdapter.js @@ -21,7 +21,7 @@ "use strict"; -const Elasticsearch = require("../../service/storage/elasticsearch"); +const { Elasticsearch } = require("../../service/storage/Elasticsearch"); const { IndexCache } = require("./indexCache"); const { isPlainObject } = require("../../util/safeObject"); const kerror = require("../../kerror"); @@ -38,16 +38,17 @@ class ClientAdapter { * @param {storeScopeEnum} scope */ constructor(scope) { - this.client = new Elasticsearch( + this.es = new Elasticsearch( global.kuzzle.config.services.storageEngine, scope, ); + this.client = this.es.client; this.scope = scope; this.cache = new IndexCache(); } async init() { - await this.client.init(); + await this.es.init(); await this.populateCache(); this.registerCollectionEvents(); diff --git a/lib/core/storage/storageEngine.js b/lib/core/storage/storageEngine.js index e765bafbff..46500e6f1f 100644 --- a/lib/core/storage/storageEngine.js +++ b/lib/core/storage/storageEngine.js @@ -44,9 +44,9 @@ class StorageEngine { async init() { await Bluebird.all([this.public.init(), this.private.init()]); - const privateIndexes = await this.private.cache.listIndexes(); + const privateIndexes = this.private.cache.listIndexes(); - for (const publicIndex of await this.public.cache.listIndexes()) { + for (const publicIndex of this.public.cache.listIndexes()) { if (privateIndexes.includes(publicIndex)) { throw kerror.get("index_already_exists", "public", publicIndex); } diff --git a/lib/kuzzle/kuzzle.ts b/lib/kuzzle/kuzzle.ts index 7f32b4b930..7f93c2eeee 100644 --- a/lib/kuzzle/kuzzle.ts +++ b/lib/kuzzle/kuzzle.ts @@ -395,7 +395,7 @@ class Kuzzle extends KuzzleEventEmitter { * @returns {Promise} */ async install(installations: InstallationConfig[]): Promise { - if (!installations || !installations.length) { + if (!installations?.length) { return; } diff --git a/lib/service/storage/7/elasticsearch.js b/lib/service/storage/7/elasticsearch.js new file mode 100644 index 
0000000000..f39999be83 --- /dev/null +++ b/lib/service/storage/7/elasticsearch.js @@ -0,0 +1,2926 @@ +"use strict"; +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ES7 = void 0; +const lodash_1 = __importDefault(require("lodash")); +const sdk_es7_1 = require("sdk-es7"); +const assert_1 = __importDefault(require("assert")); +const ms_1 = __importDefault(require("ms")); +const bluebird_1 = __importDefault(require("bluebird")); +const semver_1 = __importDefault(require("semver")); +const debug_1 = __importDefault(require("../../../util/debug")); +const esWrapper_1 = __importDefault(require("./esWrapper")); +const queryTranslator_1 = __importDefault(require("../commons/queryTranslator")); +const didYouMean_1 = __importDefault(require("../../../util/didYouMean")); +const kerror = __importStar(require("../../../kerror")); +const requestAssertions_1 = require("../../../util/requestAssertions"); +const safeObject_1 = require("../../../util/safeObject"); +const storeScopeEnum_1 = __importDefault(require("../../../core/storage/storeScopeEnum")); +const extractFields_1 = __importDefault(require("../../../util/extractFields")); +const mutex_1 = require("../../../util/mutex"); +const name_generator_1 = require("../../../util/name-generator"); +(0, debug_1.default)("kuzzle:services:elasticsearch"); +const SCROLL_CACHE_PREFIX = "_docscroll_"; +const ROOT_MAPPING_PROPERTIES = [ + "properties", + "_meta", + "dynamic", + "dynamic_templates", +]; +const CHILD_MAPPING_PROPERTIES = ["type"]; +// Used for collection emulation +const HIDDEN_COLLECTION = "_kuzzle_keep"; +const ALIAS_PREFIX = "@"; // @todo next major release: Add ALIAS_PREFIX in FORBIDDEN_CHARS +const PRIVATE_PREFIX = "%"; +const PUBLIC_PREFIX = "&"; +const INDEX_PREFIX_POSITION_IN_INDICE = 0; +const INDEX_PREFIX_POSITION_IN_ALIAS = 1; +const NAME_SEPARATOR = "."; +const FORBIDDEN_CHARS = `\\/*?"<>| \t\r\n,+#:${NAME_SEPARATOR}${PUBLIC_PREFIX}${PRIVATE_PREFIX}`; +const DYNAMIC_PROPERTY_VALUES = ["true", "false", "strict"]; +// used to check whether we need to wait for ES to initialize or not +var esStateEnum; +(function (esStateEnum) { + esStateEnum[esStateEnum["AWAITING"] = 1] = "AWAITING"; + esStateEnum[esStateEnum["NONE"] = 2] = "NONE"; + esStateEnum[esStateEnum["OK"] = 3] = "OK"; +})(esStateEnum || (esStateEnum = {})); +let esState = esStateEnum.NONE; +/** + * @param {Kuzzle} kuzzle kuzzle instance + * @param {Object} config Service configuration + * @param {storeScopeEnum} scope + * @constructor + */ +class ES7 { + constructor(config, scope = storeScopeEnum_1.default.PUBLIC) { + this._config = config; + this._scope = scope; + this._indexPrefix = + scope === storeScopeEnum_1.default.PRIVATE ? 
PRIVATE_PREFIX : PUBLIC_PREFIX; + this._client = null; + this._esWrapper = null; + this._esVersion = null; + this._translator = new queryTranslator_1.default(); + // Allowed root key of a search query + this.searchBodyKeys = [ + "aggregations", + "aggs", + "collapse", + "explain", + "fields", + "from", + "highlight", + "query", + "search_after", + "search_timeout", + "size", + "sort", + "suggest", + "_name", + "_source", + "_source_excludes", + "_source_includes", + ]; + /** + * Only allow stored-scripts in queries + */ + this.scriptKeys = ["script", "_script"]; + this.scriptAllowedArgs = ["id", "params"]; + this.maxScrollDuration = this._loadMsConfig("maxScrollDuration"); + this.scrollTTL = this._loadMsConfig("defaults.scrollTTL"); + } + get scope() { + return this._scope; + } + /** + * Initializes the elasticsearch client + * + * @override + * @returns {Promise} + */ + async _initSequence() { + if (this._client) { + return; + } + if (global.NODE_ENV !== "development" && + this._config.commonMapping.dynamic === "true") { + global.kuzzle.log.warn([ + "Your dynamic mapping policy is set to 'true' for new fields.", + "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", + 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', + ].join("\n")); + } + this._client = new sdk_es7_1.Client(this._config.client); + await this.waitForElasticsearch(); + this._esWrapper = new esWrapper_1.default(this._client); + const { body: { version }, } = await this._client.info(); + if (version && + !semver_1.default.satisfies(semver_1.default.coerce(version.number), ">= 7.0.0")) { + throw kerror.get("services", "storage", "version_mismatch", version.number); + } + this._esVersion = version; + } + /** + * Translate Koncorde filters to Elasticsearch query + * + * @param {Object} filters - Set of valid Koncorde filters + * @returns {Object} Equivalent Elasticsearch query + */ + translateKoncordeFilters(filters) { + return this._translator.translate(filters); + } + /** + * Returns some basic information about this service + * @override + * + * @returns {Promise.} service informations + */ + info() { + const result = { + type: "elasticsearch", + version: this._esVersion, + }; + return this._client + .info() + .then(({ body }) => { + result.version = body.version.number; + result.lucene = body.version.lucene_version; + return this._client.cluster.health(); + }) + .then(({ body }) => { + result.status = body.status; + return this._client.cluster.stats({ human: true }); + }) + .then(({ body }) => { + result.spaceUsed = body.indices.store.size; + result.nodes = body.nodes; + return result; + }) + .catch((error) => this._esWrapper.reject(error)); + } + /** + * Returns detailed multi-level storage stats data + * + * @returns {Promise.} + */ + async stats() { + const esRequest = { + metric: ["docs", "store"], + }; + const { body } = await this._client.indices.stats(esRequest); + const indexes = {}; + let size = 0; + for (const [indice, indiceInfo] of Object.entries(body.indices)) { + const infos = indiceInfo; + // Ignore non-Kuzzle indices + if (!indice.startsWith(PRIVATE_PREFIX) && + !indice.startsWith(PUBLIC_PREFIX)) { + continue; + } + const aliases = await this._getAliasFromIndice(indice); + const alias = aliases[0]; + const indexName = this._extractIndex(alias); + const collectionName = this._extractCollection(alias); + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + 
collectionName === HIDDEN_COLLECTION) { + continue; + } + if (!indexes[indexName]) { + indexes[indexName] = { + collections: [], + name: indexName, + size: 0, + }; + } + indexes[indexName].collections.push({ + documentCount: infos.total.docs.count, + name: collectionName, + size: infos.total.store.size_in_bytes, + }); + indexes[indexName].size += infos.total.store.size_in_bytes; + size += infos.total.store.size_in_bytes; + } + return { + indexes: Object.values(indexes), + size, + }; + } + /** + * Scrolls results from previous elasticsearch query. + * Automatically clears the scroll context after the last result page has + * been fetched. + * + * @param {String} scrollId - Scroll identifier + * @param {Object} options - scrollTTL (default scrollTTL) + * + * @returns {Promise.<{ scrollId, hits, aggregations, total }>} + */ + async scroll(scrollId, { scrollTTL } = {}) { + const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; + const esRequest = { + scroll: _scrollTTL, + scroll_id: scrollId, + }; + const cacheKey = SCROLL_CACHE_PREFIX + global.kuzzle.hash(esRequest.scroll_id); + (0, debug_1.default)("Scroll: %o", esRequest); + if (_scrollTTL) { + const scrollDuration = (0, ms_1.default)(_scrollTTL); + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get("services", "storage", "scroll_duration_too_great", _scrollTTL); + } + } + const stringifiedScrollInfo = await global.kuzzle.ask("core:cache:internal:get", cacheKey); + if (!stringifiedScrollInfo) { + throw kerror.get("services", "storage", "unknown_scroll_id"); + } + const scrollInfo = JSON.parse(stringifiedScrollInfo); + try { + const { body } = await this._client.scroll(esRequest); + scrollInfo.fetched += body.hits.hits.length; + if (scrollInfo.fetched >= body.hits.total.value) { + (0, debug_1.default)("Last scroll page fetched: deleting scroll %s", body._scroll_id); + await global.kuzzle.ask("core:cache:internal:del", cacheKey); + await this.clearScroll(body._scroll_id); + } + else { + await global.kuzzle.ask("core:cache:internal:store", cacheKey, JSON.stringify(scrollInfo), { + ttl: (0, ms_1.default)(_scrollTTL) || this.scrollTTL, + }); + } + body.remaining = body.hits.total.value - scrollInfo.fetched; + return await this._formatSearchResult(body, scrollInfo); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Searches documents from elasticsearch with a query + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) 
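+     * @param {Array.<Object>} targets - Optional multi-collection targets ({ index, collections }); when provided, takes precedence over index/collection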
+ * @param {Object} options - from (undefined), size (undefined), scroll (undefined) + * + * @returns {Promise.<{ scrollId, hits, aggregations, suggest, total }>} + */ + async search({ index, collection, searchBody, targets, } = {}, { from, size, scroll, } = {}) { + let esIndexes; + if (targets && targets.length > 0) { + const indexes = new Set(); + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + indexes.add(alias); + } + } + esIndexes = Array.from(indexes).join(","); + } + else { + esIndexes = this._getAlias(index, collection); + } + const esRequest = { + body: this._sanitizeSearchBody(searchBody), + from, + index: esIndexes, + scroll, + size, + trackTotalHits: true, + }; + if (scroll) { + const scrollDuration = (0, ms_1.default)(scroll); + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get("services", "storage", "scroll_duration_too_great", scroll); + } + } + (0, debug_1.default)("Search: %j", esRequest); + try { + const { body } = await this._client.search(esRequest); + if (body._scroll_id) { + const ttl = (esRequest.scroll && (0, ms_1.default)(esRequest.scroll)) || + (0, ms_1.default)(this._config.defaults.scrollTTL); + await global.kuzzle.ask("core:cache:internal:store", SCROLL_CACHE_PREFIX + global.kuzzle.hash(body._scroll_id), JSON.stringify({ + collection, + fetched: body.hits.hits.length, + index, + targets, + }), { ttl }); + body.remaining = body.hits.total.value - body.hits.hits.length; + } + return await this._formatSearchResult(body, { + collection, + index, + targets, + }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Generate a map that associate an alias to a pair of index and collection + * + * @param {*} targets + * @returns + */ + _mapTargetsToAlias(targets) { + const aliasToTargets = {}; + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + if (!aliasToTargets[alias]) { + aliasToTargets[alias] = { + collection: targetCollection, + index: target.index, + }; + } + } + } + return aliasToTargets; + } + async _formatSearchResult(body, searchInfo = {}) { + let aliasToTargets = {}; + const aliasCache = new Map(); + if (searchInfo.targets) { + /** + * We need to map the alias to the target index and collection, + * so we can later retrieve informations about an index & collection + * based on its alias. + */ + aliasToTargets = this._mapTargetsToAlias(searchInfo.targets); + } + const formatHit = async (hit) => { + let index = searchInfo.index; + let collection = searchInfo.collection; + /** + * If the search has been done on multiple targets, we need to + * retrieve the appropriate index and collection based on the alias + */ + if (hit._index && searchInfo.targets) { + // Caching to reduce call to ES + let aliases = aliasCache.get(hit._index); + if (!aliases) { + // Retrieve all the alias associated to one index + aliases = await this._getAliasFromIndice(hit._index); + aliasCache.set(hit._index, aliases); + } + /** + * Since multiple alias can point to the same index in ES, we need to + * find the first alias that exists in the map of aliases associated + * to the targets. 
+ */ + const alias = aliases.find((_alias) => aliasToTargets[_alias]); + // Retrieve index and collection information based on the matching alias + index = aliasToTargets[alias].index; + collection = aliasToTargets[alias].collection; + } + return { + _id: hit._id, + _score: hit._score, + _source: hit._source, + collection, + highlight: hit.highlight, + index, + }; + }; + async function formatInnerHits(innerHits) { + if (!innerHits) { + return undefined; + } + const formattedInnerHits = {}; + for (const [name, innerHit] of Object.entries(innerHits)) { + formattedInnerHits[name] = await bluebird_1.default.map(innerHit.hits.hits, formatHit); + } + return formattedInnerHits; + } + const hits = await bluebird_1.default.map(body.hits.hits, async (hit) => ({ + inner_hits: await formatInnerHits(hit.inner_hits), + ...(await formatHit(hit)), + })); + return { + aggregations: body.aggregations, + hits, + remaining: body.remaining, + scrollId: body._scroll_id, + suggest: body.suggest, + total: body.hits.total.value, + }; + } + /** + * Gets the document with given ID + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async get(index, collection, id) { + const esRequest = { + id, + index: this._getAlias(index, collection), + }; + // Just in case the user make a GET on url /mainindex/test/_search + // Without this test we return something weird: a result.hits.hits with all + // document without filter because the body is empty in HTTP by default + if (esRequest.id === "_search") { + return kerror.reject("services", "storage", "search_as_an_id"); + } + (0, debug_1.default)("Get document: %o", esRequest); + try { + const { body } = await this._client.get(esRequest); + return { + _id: body._id, + _source: body._source, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Returns the list of documents matching the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mGet(index, collection, ids) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + const esRequest = { + body: { + docs: ids.map((_id) => ({ + _id, + _index: this._getAlias(index, collection), + })), + }, + }; + (0, debug_1.default)("Multi-get documents: %o", esRequest); + let body; + try { + ({ body } = await this._client.mget(esRequest)); // NOSONAR + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + const errors = []; + const items = []; + for (const doc of body.docs) { + if (doc.found) { + items.push({ + _id: doc._id, + _source: doc._source, + _version: doc._version, + }); + } + else { + errors.push(doc._id); + } + } + return { errors, items }; + } + /** + * Counts how many documents match the filter given in body + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) 
+ * + * @returns {Promise.} count + */ + async count(index, collection, searchBody = {}) { + const esRequest = { + body: this._sanitizeSearchBody(searchBody), + index: this._getAlias(index, collection), + }; + (0, debug_1.default)("Count: %o", esRequest); + try { + const { body } = await this._client.count(esRequest); + return body.count; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends the new document to elasticsearch + * Cleans data to match elasticsearch specifications + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} content - Document content + * @param {Object} options - id (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { _id, _version, _source } + */ + async create(index, collection, content, { id, refresh, userId = null, injectKuzzleMeta = true, } = {}) { + (0, requestAssertions_1.assertIsObject)(content); + const esRequest = { + body: content, + id, + index: this._getAlias(index, collection), + op_type: id ? "create" : "index", + refresh, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + // Add metadata + if (injectKuzzleMeta) { + esRequest.body._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }; + } + (0, debug_1.default)("Create document: %o", esRequest); + try { + const { body } = await this._client.index(esRequest); + return { + _id: body._id, + _source: esRequest.body, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Creates a new document to Elasticsearch, or replace it if it already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null), injectKuzzleMeta (true) + * + * @returns {Promise.} { _id, _version, _source, created } + */ + async createOrReplace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { + const esRequest = { + body: content, + id, + index: this._getAlias(index, collection), + refresh, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + // Add metadata + if (injectKuzzleMeta) { + esRequest.body._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + (0, debug_1.default)("Create or replace document: %o", esRequest); + try { + const { body } = await this._client.index(esRequest); + return { + _id: body._id, + _source: esRequest.body, + _version: body._version, + created: body.result === "created", // Needed by the notifier + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends the partial document to elasticsearch with the id to update + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async update(index, collection, id, content, { refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { + const esRequest = { + _source: "true", + body: { doc: content }, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: 
retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + if (injectKuzzleMeta) { + // Add metadata + esRequest.body.doc._kuzzle_info = { + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + (0, debug_1.default)("Update document: %o", esRequest); + try { + const { body } = await this._client.update(esRequest); + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends the partial document to elasticsearch with the id to update + * Creates the document if it doesn't already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - defaultValues ({}), refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async upsert(index, collection, id, content, { defaultValues = {}, refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { + const esRequest = { + _source: "true", + body: { + doc: content, + upsert: { ...defaultValues, ...content }, + }, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + // Add metadata + const user = getKuid(userId); + const now = Date.now(); + if (injectKuzzleMeta) { + esRequest.body.doc._kuzzle_info = { + updatedAt: now, + updater: user, + }; + esRequest.body.upsert._kuzzle_info = { + author: user, + createdAt: now, + }; + } + (0, debug_1.default)("Upsert document: %o", esRequest); + try { + const { body } = await this._client.update(esRequest); + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + created: body.result === "created", + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Replaces a document to Elasticsearch + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async replace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { + const alias = this._getAlias(index, collection); + const esRequest = { + body: content, + id, + index: alias, + refresh, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + if (injectKuzzleMeta) { + // Add metadata + esRequest.body._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + try { + const { body: exists } = await this._client.exists({ id, index: alias }); + if (!exists) { + throw kerror.get("services", "storage", "not_found", id, index, collection); + } + (0, debug_1.default)("Replace document: %o", esRequest); + const { body } = await this._client.index(esRequest); + return { + _id: id, + _source: esRequest.body, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends to elasticsearch the document id to delete + * + * @param {String} index - Index name + * @param {String} collection - Collection 
name + * @param {String} id - Document id + * @param {Object} options - refresh (undefined) + * + * @returns {Promise} + */ + async delete(index, collection, id, { refresh, } = {}) { + const esRequest = { + id, + index: this._getAlias(index, collection), + refresh, + }; + assertWellFormedRefresh(esRequest); + (0, debug_1.default)("Delete document: %o", esRequest); + try { + await this._client.delete(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + return null; + } + /** + * Deletes all documents matching the provided filters. + * If fetch=false, the max documents write limit is not applied. + * + * Options: + * - size: size of the batch to retrieve documents (no-op if fetch=false) + * - refresh: refresh option for ES + * - fetch: if true, will fetch the documents before delete them + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} options - size (undefined), refresh (undefined), fetch (true) + * + * @returns {Promise.<{ documents, total, deleted, failures: Array<{ _shardId, reason }> }>} + */ + async deleteByQuery(index, collection, query, { refresh, size = 1000, fetch = true, } = {}) { + const esRequest = { + body: this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + size, + }; + if (!(0, safeObject_1.isPlainObject)(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + try { + let documents = []; + if (fetch) { + documents = await this._getAllDocumentsFromQuery(esRequest); + } + (0, debug_1.default)("Delete by query: %o", esRequest); + esRequest.refresh = refresh === "wait_for" ? true : refresh; + const { body } = await this._client.deleteByQuery(esRequest); + return { + deleted: body.deleted, + documents, + failures: body.failures.map(({ shardId, reason }) => ({ + reason, + shardId, + })), + total: body.total, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Delete fields of a document and replace it + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Array} fields - Document fields to be removed + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async deleteFields(index, collection, id, fields, { refresh, userId = null, } = {}) { + const alias = this._getAlias(index, collection); + const esRequest = { + id, + index: alias, + }; + try { + (0, debug_1.default)("DeleteFields document: %o", esRequest); + const { body } = await this._client.get(esRequest); + for (const field of fields) { + if (lodash_1.default.has(body._source, field)) { + lodash_1.default.set(body._source, field, undefined); + } + } + body._source._kuzzle_info = { + ...body._source._kuzzle_info, + updatedAt: Date.now(), + updater: getKuid(userId), + }; + const newEsRequest = { + body: body._source, + id, + index: alias, + refresh, + }; + assertNoRouting(newEsRequest); + assertWellFormedRefresh(newEsRequest); + const { body: updated } = await this._client.index(newEsRequest); + return { + _id: id, + _source: body._source, + _version: updated._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * 
@param {Object} query - Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined), size (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async updateByQuery(index, collection, query, changes, { refresh, size = 1000, userId = null, } = {}) { + try { + const esRequest = { + body: this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + size, + }; + const documents = await this._getAllDocumentsFromQuery(esRequest); + for (const document of documents) { + document._source = undefined; + document.body = changes; + } + (0, debug_1.default)("Update by query: %o", esRequest); + const { errors, items } = await this.mUpdate(index, collection, documents, { refresh, userId }); + return { + errors, + successes: items, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async bulkUpdateByQuery(index, collection, query, changes, { refresh = false, } = {}) { + const script = { + params: {}, + source: "", + }; + const flatChanges = (0, extractFields_1.default)(changes, { alsoExtractValues: true }); + for (const { key, value } of flatChanges) { + script.source += `ctx._source.${key} = params['${key}'];`; + script.params[key] = value; + } + const esRequest = { + body: { + query: this._sanitizeSearchBody({ query }).query, + script, + }, + index: this._getAlias(index, collection), + refresh, + }; + (0, debug_1.default)("Bulk Update by query: %o", esRequest); + let response; + try { + response = await this._client.updateByQuery(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + if (response.body.failures.length) { + const errors = response.body.failures.map(({ shardId, reason }) => ({ + reason, + shardId, + })); + throw kerror.get("services", "storage", "incomplete_update", response.body.updated, errors); + } + return { + updated: response.body.updated, + }; + } + /** + * Execute the callback with a batch of documents of specified size until all + * documents matched by the query have been processed. 
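+     * The scroll context opened for this request is cleared once processing ends (see the finally block below).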
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Function} callback - callback that will be called with the "hits" array + * @param {Object} options - size (10), scrollTTL ('5s') + * + * @returns {Promise.} Array of results returned by the callback + */ + async mExecute(index, collection, query, callback, { size = 10, scrollTTl = "5s", } = {}) { + const esRequest = { + body: this._sanitizeSearchBody({ query }), + from: 0, + index: this._getAlias(index, collection), + scroll: scrollTTl, + size, + }; + if (!(0, safeObject_1.isPlainObject)(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + const client = this._client; + let results = []; + let processed = 0; + let scrollId = null; + try { + results = await new bluebird_1.default((resolve, reject) => { + this._client.search(esRequest, async function getMoreUntilDone(error, { body: { hits, _scroll_id } }) { + if (error) { + reject(error); + return; + } + scrollId = _scroll_id; + const ret = callback(hits.hits); + results.push(await ret); + processed += hits.hits.length; + if (hits.total.value !== processed) { + client.scroll({ + scroll: esRequest.scroll, + scroll_id: _scroll_id, + }, getMoreUntilDone); + } + else { + resolve(results); + } + }); + }); + } + finally { + this.clearScroll(scrollId); + } + return results; + } + /** + * Creates a new index. + * + * This methods creates an hidden collection in the provided index to be + * able to list it. + * This methods resolves if the index name does not already exists either as + * private or public index. + * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async createIndex(index) { + this._assertValidIndexAndCollection(index); + let body; + try { + body = (await this._client.cat.aliases({ format: "json" })).body; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias: name }) => name); + for (const alias of aliases) { + const indexName = this._extractIndex(alias); + if (index === indexName) { + const indexType = alias[INDEX_PREFIX_POSITION_IN_ALIAS] === PRIVATE_PREFIX + ? "private" + : "public"; + throw kerror.get("services", "storage", "index_already_exists", indexType, index); + } + } + await this._createHiddenCollection(index); + return null; + } + /** + * Creates an empty collection. + * Mappings and settings will be applied if supplied. 
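+     * If the collection already exists, it is updated with the provided mappings and settings instead (see updateCollection).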
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async createCollection(index, collection, { mappings = {}, settings = {}, } = {}) { + this._assertValidIndexAndCollection(index, collection); + if (collection === HIDDEN_COLLECTION) { + throw kerror.get("services", "storage", "collection_reserved", HIDDEN_COLLECTION); + } + const mutex = new mutex_1.Mutex(`hiddenCollection/create/${index}`); + try { + await mutex.lock(); + if (await this._hasHiddenCollection(index)) { + await this.deleteCollection(index, HIDDEN_COLLECTION); + } + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + finally { + await mutex.unlock(); + } + const esRequest = { + body: { + aliases: { + [this._getAlias(index, collection)]: {}, + }, + mappings: {}, + settings, + }, + index: await this._getAvailableIndice(index, collection), + wait_for_active_shards: await this._getWaitForActiveShards(), + }; + this._checkDynamicProperty(mappings); + const exists = await this.hasCollection(index, collection); + if (exists) { + return this.updateCollection(index, collection, { mappings, settings }); + } + this._checkMappings(mappings); + esRequest.body.mappings = { + _meta: mappings._meta || this._config.commonMapping._meta, + dynamic: mappings.dynamic || this._config.commonMapping.dynamic, + properties: lodash_1.default.merge(mappings.properties, this._config.commonMapping.properties), + }; + esRequest.body.settings.number_of_replicas = + esRequest.body.settings.number_of_replicas || + this._config.defaultSettings.number_of_replicas; + esRequest.body.settings.number_of_shards = + esRequest.body.settings.number_of_shards || + this._config.defaultSettings.number_of_shards; + try { + await this._client.indices.create(esRequest); + } + catch (error) { + if (lodash_1.default.get(error, "meta.body.error.type") === + "resource_already_exists_exception") { + // race condition: the indice has been created between the "exists" + // check above and this "create" attempt + return null; + } + throw this._esWrapper.formatESError(error); + } + return null; + } + /** + * Retrieves settings definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.<{ settings }>} + */ + async getSettings(index, collection) { + const indice = await this._getIndice(index, collection); + const esRequest = { + index: indice, + }; + (0, debug_1.default)("Get settings: %o", esRequest); + try { + const { body } = await this._client.indices.getSettings(esRequest); + return body[indice].settings.index; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Retrieves mapping definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} options - includeKuzzleMeta (false) + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async getMapping(index, collection, { includeKuzzleMeta = false, } = {}) { + const indice = await this._getIndice(index, collection); + const esRequest = { + index: indice, + }; + (0, debug_1.default)("Get mapping: %o", esRequest); + try { + const { body } = await this._client.indices.getMapping(esRequest); + const properties = includeKuzzleMeta + ? 
body[indice].mappings.properties + : lodash_1.default.omit(body[indice].mappings.properties, "_kuzzle_info"); + return { + _meta: body[indice].mappings._meta, + dynamic: body[indice].mappings.dynamic, + properties, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Updates a collection mappings and settings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async updateCollection(index, collection, { mappings = {}, settings = {}, } = {}) { + const esRequest = { + index: await this._getIndice(index, collection), + }; + // If either the putMappings or the putSettings operation fail, we need to + // rollback the whole operation. Since mappings can't be rollback, we try to + // update the settings first, then the mappings and we rollback the settings + // if putMappings fail. + let indexSettings; + try { + indexSettings = await this._getSettings(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + if (!lodash_1.default.isEmpty(settings)) { + await this.updateSettings(index, collection, settings); + } + try { + if (!lodash_1.default.isEmpty(mappings)) { + const previousMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + await this.updateMapping(index, collection, mappings); + if (this._dynamicChanges(previousMappings, mappings)) { + await this.updateSearchIndex(index, collection); + } + } + } + catch (error) { + const allowedSettings = this.getAllowedIndexSettings(indexSettings); + // Rollback to previous settings + if (!lodash_1.default.isEmpty(settings)) { + await this.updateSettings(index, collection, allowedSettings); + } + throw error; + } + return null; + } + /** + * Given index settings we return a new version of index settings + * only with allowed settings that can be set (during update or create index). + * @param indexSettings the index settings + * @returns {{index: *}} a new index settings with only allowed settings. + */ + getAllowedIndexSettings(indexSettings) { + return { + index: lodash_1.default.omit(indexSettings.index, [ + "creation_date", + "provided_name", + "uuid", + "version", + ]), + }; + } + /** + * Sends an empty UpdateByQuery request to update the search index + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @returns {Promise.} {} + */ + async updateSearchIndex(index, collection) { + const esRequest = { + body: {}, + // @cluster: conflicts when two nodes start at the same time + conflicts: "proceed", + index: this._getAlias(index, collection), + refresh: true, + // This operation can take some time: this should be an ES + // background task. And it's preferable to a request timeout when + // processing large indexes. 
+ wait_for_completion: false, + }; + (0, debug_1.default)("UpdateByQuery: %o", esRequest); + try { + await this._client.updateByQuery(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Update a collection mappings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} mappings - Collection mappings in ES format + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async updateMapping(index, collection, mappings = {}) { + const esRequest = { + body: {}, + index: this._getAlias(index, collection), + }; + this._checkDynamicProperty(mappings); + const collectionMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + this._checkMappings(mappings); + esRequest.body = { + _meta: mappings._meta || collectionMappings._meta, + dynamic: mappings.dynamic || collectionMappings.dynamic, + properties: mappings.properties, + }; + (0, debug_1.default)("Update mapping: %o", esRequest); + try { + await this._client.indices.putMapping(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const fullProperties = lodash_1.default.merge(collectionMappings.properties, mappings.properties); + return { + _meta: esRequest.body._meta, + dynamic: esRequest.body.dynamic, + properties: fullProperties, + }; + } + /** + * Updates a collection settings (eg: analyzers) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} settings - Collection settings in ES format + * + * @returns {Promise} + */ + async updateSettings(index, collection, settings = {}) { + const esRequest = { + index: this._getAlias(index, collection), + }; + await this._client.indices.close(esRequest); + try { + await this._client.indices.putSettings({ ...esRequest, body: settings }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + finally { + await this._client.indices.open(esRequest); + } + return null; + } + /** + * Empties the content of a collection. Keep the existing mapping and settings. 
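+ *
+ * Usage sketch (hypothetical names): removes every document of the collection
+ * by deleting and re-creating its indice with the same mappings and settings:
+ *
+ *   await es.truncateCollection("nepali", "liia");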
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise} + */ + async truncateCollection(index, collection) { + let mappings; + let settings; + const esRequest = { + index: await this._getIndice(index, collection), + }; + try { + mappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + settings = await this._getSettings(esRequest); + settings = { + ...settings, + ...this.getAllowedIndexSettings(settings), + }; + await this._client.indices.delete(esRequest); + await this._client.indices.create({ + ...esRequest, + body: { + aliases: { + [this._getAlias(index, collection)]: {}, + }, + mappings, + settings, + }, + wait_for_active_shards: await this._getWaitForActiveShards(), + }); + return null; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Runs several action and document + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents to import + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.<{ items, errors }> + */ + async import(index, collection, documents, { refresh, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection); + const dateNow = Date.now(); + const esRequest = { + body: documents, + refresh, + timeout, + }; + const kuzzleMeta = { + created: { + author: getKuid(userId), + createdAt: dateNow, + updatedAt: null, + updater: null, + }, + updated: { + updatedAt: dateNow, + updater: getKuid(userId), + }, + }; + assertWellFormedRefresh(esRequest); + this._scriptCheck(documents); + this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta); + let response; + try { + response = await this._client.bulk(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const body = response.body; + const result = { + errors: [], + items: [], + }; + let idx = 0; + /** + * @warning Critical code section + * + * bulk body can contain more than 10K elements + */ + for (let i = 0; i < body.items.length; i++) { + const row = body.items[i]; + const action = Object.keys(row)[0]; + const item = row[action]; + if (item.status >= 400) { + const error = { + _id: item._id, + status: item.status, + }; + // update action contain body in "doc" field + // the delete action is not followed by an action payload + if (action === "update") { + error._source = documents[idx + 1].doc; + error._source._kuzzle_info = undefined; + } + else if (action !== "delete") { + error._source = documents[idx + 1]; + error._source._kuzzle_info = undefined; + } + // ES response does not systematicaly include an error object + // (e.g. delete action with 404 status) + if (item.error) { + error.error = { + reason: item.error.reason, + type: item.error.type, + }; + } + result.errors.push({ [action]: error }); + } + else { + result.items.push({ + [action]: { + _id: item._id, + status: item.status, + }, + }); + } + // the delete action is not followed by an action payload + idx = action === "delete" ? 
idx + 1 : idx + 2; + } + /* end critical code section */ + return result; + } + /** + * Retrieves the complete list of existing collections in the current index + * + * @param {String} index - Index name + * @param {Object.Boolean} includeHidden - Optional: include HIDDEN_COLLECTION in results + * + * @returns {Promise.} Collection names + */ + async listCollections(index, { includeHidden = false } = {}) { + let body; + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias }) => alias); + const schema = this._extractSchema(aliases, { includeHidden }); + return schema[index] || []; + } + /** + * Retrieves the complete list of indexes + * + * @returns {Promise.} Index names + */ + async listIndexes() { + let body; + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias }) => alias); + const schema = this._extractSchema(aliases); + return Object.keys(schema); + } + /** + * Returns an object containing the list of indexes and collections + * + * @returns {Object.} Object + */ + async getSchema() { + let body; + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias }) => alias); + const schema = this._extractSchema(aliases, { includeHidden: true }); + for (const [index, collections] of Object.entries(schema)) { + schema[index] = collections.filter((c) => c !== HIDDEN_COLLECTION); + } + return schema; + } + /** + * Retrieves the complete list of aliases + * + * @returns {Promise.} [ { alias, index, collection, indice } ] + */ + async listAliases() { + let body; + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = []; + for (const { alias, index: indice } of body) { + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix) { + aliases.push({ + alias, + collection: this._extractCollection(alias), + index: this._extractIndex(alias), + indice, + }); + } + } + return aliases; + } + /** + * Deletes a collection + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise} + */ + async deleteCollection(index, collection) { + const indice = await this._getIndice(index, collection); + const esRequest = { + index: indice, + }; + try { + await this._client.indices.delete(esRequest); + const alias = this._getAlias(index, collection); + if (await this._checkIfAliasExists(alias)) { + await this._client.indices.deleteAlias({ + index: indice, + name: alias, + }); + } + await this._createHiddenCollection(index); + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + return null; + } + /** + * Deletes multiple indexes + * + * @param {String[]} indexes - Index names + * + * @returns {Promise.} + */ + async deleteIndexes(indexes = []) { + if (indexes.length === 0) { + return bluebird_1.default.resolve([]); + } + const deleted = new Set(); + try { + const { body } = await this._client.cat.aliases({ format: "json" }); + const esRequest = body.reduce((request, { alias, index: indice }) => { + const index = this._extractIndex(alias); + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + !indexes.includes(index)) { + return request; + } + 
deleted.add(index); + request.index.push(indice); + return request; + }, { index: [] }); + if (esRequest.index.length === 0) { + return []; + } + (0, debug_1.default)("Delete indexes: %o", esRequest); + await this._client.indices.delete(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + return Array.from(deleted); + } + /** + * Deletes an index + * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async deleteIndex(index) { + await this.deleteIndexes([index]); + return null; + } + /** + * Forces a refresh on the collection. + * + * /!\ Can lead to some performance issues. + * cf https://www.elastic.co/guide/en/elasticsearch/guide/current/near-real-time.html for more details + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} { _shards } + */ + async refreshCollection(index, collection) { + const esRequest = { + index: this._getAlias(index, collection), + }; + let _shards; + try { + ({ + body: { _shards }, + } = await this._client.indices.refresh(esRequest)); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + return { _shards }; + } + /** + * Returns true if the document exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.} + */ + async exists(index, collection, id) { + const esRequest = { + id, + index: this._getAlias(index, collection), + }; + try { + const { body: exists } = await this._client.exists(esRequest); + return exists; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Returns the list of documents existing with the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. 
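+ *
+ * Usage sketch (hypothetical IDs): IDs that exist are returned in `items`,
+ * missing ones in `errors`:
+ *
+ *   const { items, errors } = await es.mExists("nepali", "liia", ["liia-1", "liia-2"]);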
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mExists(index, collection, ids) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + const esRequest = { + _source: "false", + body: { + docs: ids.map((_id) => ({ _id })), + }, + index: this._getAlias(index, collection), + }; + (0, debug_1.default)("mExists: %o", esRequest); + let body; + try { + ({ body } = await this._client.mget(esRequest)); // NOSONAR + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + const errors = []; + const items = []; + for (let i = 0; i < body.docs.length; i++) { + const doc = body.docs[i]; + if (doc.found) { + items.push(doc._id); + } + else { + errors.push(doc._id); + } + } + return { errors, items }; + } + /** + * Returns true if the index exists + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async hasIndex(index) { + const indexes = await this.listIndexes(); + return indexes.some((idx) => idx === index); + } + /** + * Returns true if the collection exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} + */ + async hasCollection(index, collection) { + const collections = await this.listCollections(index); + return collections.some((col) => col === collection); + } + /** + * Returns true if the index has the hidden collection + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async _hasHiddenCollection(index) { + const collections = await this.listCollections(index, { + includeHidden: true, + }); + return collections.some((col) => col === HIDDEN_COLLECTION); + } + /** + * Creates multiple documents at once. + * If a content has no id, one is automatically generated and assigned to it. + * If a content has a specified identifier, it is rejected if it already exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mCreate(index, collection, documents, { refresh, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection), kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); + // prepare the mget request, but only for document having a specified id + const { body } = documentsToGet.length > 0 + ? 
await this._client.mget({ + body: { docs: documentsToGet }, + index: alias, + }) + : { body: { docs: [] } }; + const existingDocuments = body.docs; + const esRequest = { + body: [], + index: alias, + refresh, + timeout, + }; + const toImport = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0, idx = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + // Documents are retrieved in the same order than we got them from user + if (typeof document._id === "string" && existingDocuments[idx]) { + if (existingDocuments[idx].found) { + document._source._kuzzle_info = undefined; + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document already exists", + status: 400, + }); + } + else { + esRequest.body.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.body.push(document._source); + toImport.push(document); + } + idx++; + } + else { + esRequest.body.push({ index: { _index: alias } }); + esRequest.body.push(document._source); + toImport.push(document); + } + } + /* end critical code section */ + return this._mExecute(esRequest, toImport, rejected); + } + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null), injectKuzzleMeta (false), limits (true) + * + * @returns {Promise.<{ items, errors }> + */ + async mCreateOrReplace(index, collection, documents, { refresh, timeout, userId = null, injectKuzzleMeta = true, limits = true, source = true, } = {}) { + let kuzzleMeta = {}; + if (injectKuzzleMeta) { + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }; + } + const alias = this._getAlias(index, collection); + const esRequest = { + body: [], + index: alias, + refresh, + timeout, + }; + const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); + esRequest.body = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.body.push({ + index: { + _id: extractedDocuments[i]._id, + _index: alias, + }, + }); + esRequest.body.push(extractedDocuments[i]._source); + } + /* end critical code section */ + return this._mExecute(esRequest, extractedDocuments, rejected, { + limits, + source, + }); + } + /** + * Updates multiple documents with one request + * Replacements are rejected if targeted documents do not exist + * (like with the normal "update" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), retryOnConflict (0), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mUpdate(index, collection, documents, { refresh = undefined, retryOnConflict = 0, timeout = undefined, userId = null, } = {}) { + const alias = this._getAlias(index, collection), toImport = [], esRequest = { + body: [], + index: alias, + refresh, + timeout, + }, kuzzleMeta = { + _kuzzle_info: { + updatedAt: Date.now(), + updater: getKuid(userId), + }, + }, { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); + /** + * @warning 
Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const extractedDocument = extractedDocuments[i]; + if (typeof extractedDocument._id === "string") { + esRequest.body.push({ + update: { + _id: extractedDocument._id, + _index: alias, + retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }); + // _source: true => makes ES return the updated document source in the + // response. Required by the real-time notifier component + esRequest.body.push({ + _source: true, + doc: extractedDocument._source, + }); + toImport.push(extractedDocument); + } + else { + extractedDocument._source._kuzzle_info = undefined; + rejected.push({ + document: { + _id: extractedDocument._id, + body: extractedDocument._source, + }, + reason: "document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + const response = await this._mExecute(esRequest, toImport, rejected); + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + status: item.status, + })); + return response; + } + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - refresh (undefined), retryOnConflict (0), timeout (undefined), userId (null) + * + * @returns {Promise.<{ items, errors }> + */ + async mUpsert(index, collection, documents, { refresh, retryOnConflict = 0, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection); + const esRequest = { + body: [], + refresh, + timeout, + }; + const user = getKuid(userId); + const now = Date.now(); + const kuzzleMeta = { + doc: { + _kuzzle_info: { + updatedAt: now, + updater: user, + }, + }, + upsert: { + _kuzzle_info: { + author: user, + createdAt: now, + }, + }, + }; + const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta, { + prepareMUpsert: true, + requireId: true, + }); + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.body.push({ + update: { + _id: extractedDocuments[i]._id, + _index: alias, + _source: true, + retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }, { + doc: extractedDocuments[i]._source.changes, + upsert: extractedDocuments[i]._source.default, + }); + // _source: true + // Makes ES return the updated document source in the response. 
+ // Required by the real-time notifier component + } + /* end critical code section */ + const response = await this._mExecute(esRequest, extractedDocuments, rejected); + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + created: item.result === "created", // Needed by the notifier + status: item.status, + })); + return response; + } + /** + * Replaces multiple documents at once. + * Replacements are rejected if targeted documents do not exist + * (like with the normal "replace" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mReplace(index, collection, documents, { refresh, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection), kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { + prepareMGet: true, + requireId: true, + }); + if (documentsToGet.length < 1) { + return { errors: rejected, items: [] }; + } + const { body } = await this._client.mget({ + body: { docs: documentsToGet }, + index: alias, + }); + const existingDocuments = body.docs; + const esRequest = { + body: [], + refresh, + timeout, + }; + const toImport = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + // Documents are retrieved in the same order than we got them from user + if (existingDocuments[i]?.found) { + esRequest.body.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.body.push(document._source); + toImport.push(document); + } + else { + document._source._kuzzle_info = undefined; + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + return this._mExecute(esRequest, toImport, rejected); + } + /** + * Deletes multiple documents with one request + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Documents IDs + * @param {Object} options - timeout (undefined), refresh (undefined) + * + * @returns {Promise.<{ documents, errors }> + */ + async mDelete(index, collection, ids, { refresh, } = {}) { + const query = { ids: { values: [] } }; + const validIds = []; + const partialErrors = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < ids.length; i++) { + const _id = ids[i]; + if (typeof _id === "string") { + validIds.push(_id); + } + else { + partialErrors.push({ + _id, + reason: "document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + await this.refreshCollection(index, collection); + const { items } = await this.mGet(index, collection, validIds); + let idx = 0; + /** + * @warning Critical code section + * + * 
request can contain more than 10K elements + */ + for (let i = 0; i < validIds.length; i++) { + const validId = validIds[i]; + const item = items[idx]; + if (item && item._id === validId) { + query.ids.values.push(validId); + idx++; + } + else { + partialErrors.push({ + _id: validId, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + // @todo duplicated query to get documents body, mGet here and search in + // deleteByQuery + const { documents } = await this.deleteByQuery(index, collection, query, { + refresh, + }); + return { documents, errors: partialErrors }; + } + /** + * Executes an ES request prepared by mcreate, mupdate, mreplace, mdelete or mwriteDocuments + * Returns a standardized ES response object, containing the list of + * successfully performed operations, and the rejected ones + * + * @param {Object} esRequest - Elasticsearch request + * @param {Object[]} documents - Document sources (format: {_id, _source}) + * @param {Object[]} partialErrors - pre-rejected documents + * @param {Object} options - limits (true) + * + * @returns {Promise.} results + */ + async _mExecute(esRequest, documents, partialErrors = [], { limits = true, source = true } = {}) { + assertWellFormedRefresh(esRequest); + if (this._hasExceededLimit(limits, documents)) { + return kerror.reject("services", "storage", "write_limit_exceeded"); + } + let response = { body: { items: [] } }; + if (documents.length > 0) { + try { + response = await this._client.bulk(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + const body = response.body; + const successes = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < body.items.length; i++) { + const item = body.items[i]; + const result = item[Object.keys(item)[0]]; + if (result.status >= 400) { + if (result.status === 404) { + partialErrors.push({ + document: { + _id: documents[i]._id, + body: documents[i]._source, + }, + reason: "document not found", + status: result.status, + }); + } + else { + partialErrors.push({ + document: documents[i], + reason: result.error.reason, + status: result.status, + }); + } + } + else { + successes.push({ + _id: result._id, + _source: source ? 
documents[i]._source : undefined, + _version: result._version, + created: result.result === "created", + get: result.get, + result: result.result, + status: result.status, // used by mUpdate to get the full document body + }); + } + } + /* end critical code section */ + return { + errors: partialErrors, // @todo rename items to documents + items: successes, + }; + } + /** + * Extracts, injects metadata and validates documents contained + * in a Request + * + * Used by mCreate, mUpdate, mUpsert, mReplace and mCreateOrReplace + * + * @param {Object[]} documents - Documents + * @param {Object} metadata - Kuzzle metadata + * @param {Object} options - prepareMGet (false), requireId (false) + * + * @returns {Object} { rejected, extractedDocuments, documentsToGet } + */ + _extractMDocuments(documents, metadata, { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}) { + const rejected = []; + const extractedDocuments = []; + const documentsToGet = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < documents.length; i++) { + const document = documents[i]; + if (!(0, safeObject_1.isPlainObject)(document.body) && !prepareMUpsert) { + rejected.push({ + document, + reason: "document body must be an object", + status: 400, + }); + } + else if (!(0, safeObject_1.isPlainObject)(document.changes) && prepareMUpsert) { + rejected.push({ + document, + reason: "document changes must be an object", + status: 400, + }); + } + else if (prepareMUpsert && + document.default && + !(0, safeObject_1.isPlainObject)(document.default)) { + rejected.push({ + document, + reason: "document default must be an object", + status: 400, + }); + } + else if (requireId && typeof document._id !== "string") { + rejected.push({ + document, + reason: "document _id must be a string", + status: 400, + }); + } + else { + this._processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet); + } + } + /* end critical code section */ + return { documentsToGet, extractedDocuments, rejected }; + } + _hasExceededLimit(limits, documents) { + return (limits && + documents.length > global.kuzzle.config.limits.documentsWriteCount); + } + _processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet) { + let extractedDocument; + if (prepareMUpsert) { + extractedDocument = { + _source: { + // Do not use destructuring, it's 10x slower + changes: Object.assign({}, metadata.doc, document.changes), + default: Object.assign({}, metadata.upsert, document.changes, document.default), + }, + }; + } + else { + extractedDocument = { + // Do not use destructuring, it's 10x slower + _source: Object.assign({}, metadata, document.body), + }; + } + if (document._id) { + extractedDocument._id = document._id; + } + extractedDocuments.push(extractedDocument); + if (prepareMGet && typeof document._id === "string") { + documentsToGet.push({ + _id: document._id, + _source: false, + }); + } + } + /** + * Throws an error if the provided mapping is invalid + * + * @param {Object} mapping + * @throws + */ + _checkMappings(mapping, path = [], check = true) { + const properties = Object.keys(mapping); + const mappingProperties = path.length === 0 + ? 
ROOT_MAPPING_PROPERTIES + : [...ROOT_MAPPING_PROPERTIES, ...CHILD_MAPPING_PROPERTIES]; + for (const property of properties) { + if (check && !mappingProperties.includes(property)) { + const currentPath = [...path, property].join("."); + throw kerror.get("services", "storage", "invalid_mapping", currentPath, (0, didYouMean_1.default)(property, mappingProperties)); + } + if (property === "properties") { + // type definition level, we don't check + this._checkMappings(mapping[property], [...path, "properties"], false); + } + else if (mapping[property]?.properties) { + // root properties level, check for "properties", "dynamic" and "_meta" + this._checkMappings(mapping[property], [...path, property], true); + } + } + } + /** + * Given index + collection, returns the associated alias name. + * Prefer this function to `_getIndice` and `_getAvailableIndice` whenever it is possible. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Alias name (eg: '@&nepali.liia') + */ + _getAlias(index, collection) { + return `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + } + /** + * Given an alias name, returns the associated index name. + */ + async _checkIfAliasExists(aliasName) { + const { body } = await this._client.indices.existsAlias({ + name: aliasName, + }); + return body; + } + /** + * Given index + collection, returns the associated indice name. + * Use this function if ES does not accept aliases in the request. Otherwise use `_getAlias`. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Indice name (eg: '&nepali.liia') + * @throws If there is not exactly one indice associated + */ + async _getIndice(index, collection) { + const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + const { body } = await this._client.cat.aliases({ + format: "json", + name: alias, + }); + if (body.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } + else if (body.length > 1) { + throw kerror.get("services", "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, '"indices"'); + } + return body[0].index; + } + /** + * Given an ES Request returns the settings of the corresponding indice. + * + * @param esRequest the ES Request with wanted settings. + * @return {Promise<*>} the settings of the indice. + * @private + */ + async _getSettings(esRequest) { + const response = await this._client.indices.getSettings(esRequest); + const index = esRequest.index; + return response.body[index].settings; + } + /** + * Given index + collection, returns an available indice name. + * Use this function when creating the associated indice. Otherwise use `_getAlias`. 
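+ *
+ * If the computed indice name is already taken, a random numeric suffix is
+ * appended (and the base name truncated if needed to respect the 255-byte
+ * limit), e.g. '&nepali.liia' could become something like '&nepali.liia.42'.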
+ * + * @param {String} index + * @param {String} collection + * + * @returns {String} Available indice name (eg: '&nepali.liia2') + */ + async _getAvailableIndice(index, collection) { + let indice = this._getAlias(index, collection).substring(INDEX_PREFIX_POSITION_IN_ALIAS); + if (!(await this._client.indices.exists({ index: indice })).body) { + return indice; + } + let notAvailable; + let suffix; + do { + suffix = `.${(0, name_generator_1.randomNumber)(100000)}`; + const overflow = Buffer.from(indice + suffix).length - 255; + if (overflow > 0) { + const indiceBuffer = Buffer.from(indice); + indice = indiceBuffer + .subarray(0, indiceBuffer.length - overflow) + .toString(); + } + const response = await this._client.indices.exists({ + index: indice + suffix, + }); + notAvailable = response.body; + } while (notAvailable); + return indice + suffix; + } + /** + * Given an indice, returns the associated alias name. + * + * @param {String} indice + * + * @returns {String} Alias name (eg: '@&nepali.liia') + * @throws If there is not exactly one alias associated that is prefixed with @ + */ + async _getAliasFromIndice(indice) { + const { body } = await this._client.indices.getAlias({ index: indice }); + const aliases = Object.keys(body[indice].aliases).filter((alias) => alias.startsWith(ALIAS_PREFIX)); + if (aliases.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } + return aliases; + } + /** + * Check for each indice whether it has an alias or not. + * When the latter is missing, create one based on the indice name. + * + * This check avoids a breaking change for those who were using Kuzzle before + * alias attribution for each indice turned into a standard (appear in 2.14.0). + */ + async generateMissingAliases() { + try { + const { body } = await this._client.cat.indices({ format: "json" }); + const indices = body.map(({ index: indice }) => indice); + const aliases = await this.listAliases(); + const indicesWithoutAlias = indices.filter((indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && + !aliases.some((alias) => alias.indice === indice)); + const esRequest = { body: { actions: [] } }; + for (const indice of indicesWithoutAlias) { + esRequest.body.actions.push({ + add: { alias: `${ALIAS_PREFIX}${indice}`, index: indice }, + }); + } + if (esRequest.body.actions.length > 0) { + await this._client.indices.updateAliases(esRequest); + } + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Throws if index or collection includes forbidden characters + * + * @param {String} index + * @param {String} collection + */ + _assertValidIndexAndCollection(index, collection = null) { + if (!this.isIndexNameValid(index)) { + throw kerror.get("services", "storage", "invalid_index_name", index); + } + if (collection !== null && !this.isCollectionNameValid(collection)) { + throw kerror.get("services", "storage", "invalid_collection_name", collection); + } + } + /** + * Given an alias, extract the associated index. + * + * @param {String} alias + * + * @returns {String} Index name + */ + _extractIndex(alias) { + return alias.substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1); + } + /** + * Given an alias, extract the associated collection. 
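+ *
+ * Example: given the alias '@&nepali.liia', this returns 'liia'.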
+ * + * @param {String} alias + * + * @returns {String} Collection name + */ + _extractCollection(alias) { + const separatorPos = alias.indexOf(NAME_SEPARATOR); + return alias.substr(separatorPos + 1, alias.length); + } + /** + * Given aliases, extract indexes and collections. + * + * @param {Array.} aliases + * @param {Object.Boolean} includeHidden Only refers to `HIDDEN_COLLECTION` occurences. An empty index will still be listed. Default to `false`. + * + * @returns {Object.} Indexes as key and an array of their collections as value + */ + _extractSchema(aliases, { includeHidden = false } = {}) { + const schema = {}; + for (const alias of aliases) { + const [indexName, collectionName] = alias + .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) + .split(NAME_SEPARATOR); + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix && + (collectionName !== HIDDEN_COLLECTION || includeHidden)) { + if (!schema[indexName]) { + schema[indexName] = []; + } + if (!schema[indexName].includes(collectionName)) { + schema[indexName].push(collectionName); + } + } + } + return schema; + } + /** + * Creates the hidden collection on the provided index if it does not already + * exists + * + * @param {String} index Index name + */ + async _createHiddenCollection(index) { + const mutex = new mutex_1.Mutex(`hiddenCollection/${index}`); + try { + await mutex.lock(); + if (await this._hasHiddenCollection(index)) { + return; + } + const esRequest = { + body: { + aliases: { + [this._getAlias(index, HIDDEN_COLLECTION)]: {}, + }, + settings: { + number_of_replicas: this._config.defaultSettings.number_of_replicas, + number_of_shards: this._config.defaultSettings.number_of_shards, + }, + }, + index: await this._getAvailableIndice(index, HIDDEN_COLLECTION), + wait_for_active_shards: await this._getWaitForActiveShards(), + }; + await this._client.indices.create(esRequest); + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + finally { + await mutex.unlock(); + } + } + /** + * We need to always wait for a minimal number of shards to be available + * before answering to the client. This is to avoid Elasticsearch node + * to return a 404 Not Found error when the client tries to index a + * document in the index. + * To find the best value for this setting, we need to take into account + * the number of nodes in the cluster and the number of shards per index. 
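+ *
+ * In practice this resolves to "all" when more than one node is present, and
+ * to "1" on a single-node cluster (see the implementation right below).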
+ */ + async _getWaitForActiveShards() { + const { body } = await this._client.cat.nodes({ format: "json" }); + const numberOfNodes = body.length; + if (numberOfNodes > 1) { + return "all"; + } + return "1"; + } + /** + * Scroll indice in elasticsearch and return all document that match the filter + * /!\ throws a write_limit_exceed error: this method is intended to be used + * by deleteByQuery and updateByQuery + * + * @param {Object} esRequest - Search request body + * + * @returns {Promise.} resolve to an array of documents + */ + async _getAllDocumentsFromQuery(esRequest) { + let { body: { hits, _scroll_id }, } = await this._client.search(esRequest); + if (hits.total.value > global.kuzzle.config.limits.documentsWriteCount) { + throw kerror.get("services", "storage", "write_limit_exceeded"); + } + let documents = hits.hits.map((h) => ({ + _id: h._id, + _source: h._source, + })); + while (hits.total.value !== documents.length) { + ({ + body: { hits, _scroll_id }, + } = await this._client.scroll({ + scroll: esRequest.scroll, + scroll_id: _scroll_id, + })); + documents = documents.concat(hits.hits.map((h) => ({ + _id: h._id, + _source: h._source, + }))); + } + await this.clearScroll(_scroll_id); + return documents; + } + /** + * Clean and normalize the searchBody + * Ensure only allowed parameters are passed to ES + * + * @param {Object} searchBody - ES search body (with query, aggregations, sort, etc) + */ + _sanitizeSearchBody(searchBody) { + // Only allow a whitelist of top level properties + for (const key of Object.keys(searchBody)) { + if (searchBody[key] !== undefined && !this.searchBodyKeys.includes(key)) { + throw kerror.get("services", "storage", "invalid_search_query", key); + } + } + // Ensure that the body does not include a script + this._scriptCheck(searchBody); + // Avoid empty queries that causes ES to respond with an error. + // Empty queries are turned into match_all queries + if (lodash_1.default.isEmpty(searchBody.query)) { + searchBody.query = { match_all: {} }; + } + return searchBody; + } + /** + * Throw if a script is used in the query. + * + * Only Stored Scripts are accepted + * + * @param {Object} object + */ + _scriptCheck(object) { + for (const [key, value] of Object.entries(object)) { + if (this.scriptKeys.includes(key)) { + for (const scriptArg of Object.keys(value)) { + if (!this.scriptAllowedArgs.includes(scriptArg)) { + throw kerror.get("services", "storage", "invalid_query_keyword", `${key}.${scriptArg}`); + } + } + } + // Every object must be checked here, even the ones nested into an array + else if (typeof value === "object" && value !== null) { + this._scriptCheck(value); + } + } + } + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isCollectionNameValid(name) { + return _isObjectNameValid(name); + } + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isIndexNameValid(name) { + return _isObjectNameValid(name); + } + /** + * Clears an allocated scroll + * @param {[type]} id [description] + * @returns {[type]} [description] + */ + async clearScroll(id) { + if (id) { + (0, debug_1.default)("clearing scroll: %s", id); + await this._client.clearScroll({ scroll_id: id }); + } + } + /** + * Loads a configuration value from services.storageEngine and assert a valid + * ms format. 
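+ *
+ * Example: a configured value of "2m" is parsed by ms() into 120000 (milliseconds).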
+ * + * @param {String} key - relative path to the key in configuration + * + * @returns {Number} milliseconds + */ + _loadMsConfig(key) { + const configValue = lodash_1.default.get(this._config, key); + (0, assert_1.default)(typeof configValue === "string", `services.storageEngine.${key} must be a string.`); + const parsedValue = (0, ms_1.default)(configValue); + (0, assert_1.default)(typeof parsedValue === "number", `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`); + return parsedValue; + } + /** + * Returns true if one of the mappings dynamic property changes value from + * false to true + */ + _dynamicChanges(previousMappings, newMappings) { + const previousValues = findDynamic(previousMappings); + for (const [path, previousValue] of Object.entries(previousValues)) { + if (previousValue.toString() !== "false") { + continue; + } + const newValue = lodash_1.default.get(newMappings, path); + if (newValue && newValue.toString() !== "false") { + return true; + } + } + return false; + } + async waitForElasticsearch() { + if (esState !== esStateEnum.NONE) { + while (esState !== esStateEnum.OK) { + await bluebird_1.default.delay(1000); + } + return; + } + esState = esStateEnum.AWAITING; + global.kuzzle.log.info("[ℹ] Trying to connect to Elasticsearch..."); + while (esState !== esStateEnum.OK) { + try { + // Wait for at least 1 shard to be initialized + const health = await this._client.cluster.health({ + wait_for_no_initializing_shards: true, + }); + if (health.body.number_of_pending_tasks === 0) { + global.kuzzle.log.info("[✔] Elasticsearch is ready"); + esState = esStateEnum.OK; + } + else { + global.kuzzle.log.info(`[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining`); + await bluebird_1.default.delay(1000); + } + } + catch (e) { + await bluebird_1.default.delay(1000); + } + } + } + /** + * Checks if the dynamic properties are correct + */ + _checkDynamicProperty(mappings) { + const dynamicProperties = findDynamic(mappings); + for (const [path, value] of Object.entries(dynamicProperties)) { + // Prevent common mistake + if (typeof value === "boolean") { + lodash_1.default.set(mappings, path, value.toString()); + } + else if (typeof value !== "string") { + throw kerror.get("services", "storage", "invalid_mapping", path, "Dynamic property value should be a string."); + } + if (!DYNAMIC_PROPERTY_VALUES.includes(value.toString())) { + throw kerror.get("services", "storage", "invalid_mapping", path, `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join('", "')}"`); + } + } + } + _setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta) { + /** + * @warning Critical code section + * + * bulk body can contain more than 10K elements + */ + let lastAction = ""; + const actionNames = ["index", "create", "update", "delete"]; + for (let i = 0; i < esRequest.body.length; i++) { + const item = esRequest.body[i]; + const action = Object.keys(item)[0]; + if (actionNames.indexOf(action) !== -1) { + lastAction = action; + item[action]._index = alias; + if (item[action]?._type) { + item[action]._type = undefined; + } + } + else if (lastAction === "index" || lastAction === "create") { + item._kuzzle_info = kuzzleMeta.created; + } + else if (lastAction === "update") { + this._setLastActionToKuzzleMetaUpdate(item, kuzzleMeta); + } + } + /* end critical code section */ + } + _setLastActionToKuzzleMetaUpdate(item, kuzzleMeta) { + for (const prop of ["doc", "upsert"]) { + if ((0, safeObject_1.isPlainObject)(item[prop])) { + item[prop]._kuzzle_info = kuzzleMeta.updated; + } + } + } +} +exports.ES7 = ES7; +/** + * Finds paths and values of mappings dynamic properties + * + * @example + * + * findDynamic(mappings); + * { + * "properties.metadata.dynamic": "true", + * "properties.user.properties.address.dynamic": "strict" + * } + */ +function findDynamic(mappings, path = [], results = {}) { + if (mappings.dynamic !== undefined) { + results[path.concat("dynamic").join(".")] = mappings.dynamic; + } + for (const [key, value] of Object.entries(mappings)) { + if ((0, safeObject_1.isPlainObject)(value)) { + findDynamic(value, path.concat(key), results); + } + } + return results; +} +/** + * Forbids the use of the _routing ES option + * + * @param {Object} esRequest + * @throws + */ +function assertNoRouting(esRequest) { + if (esRequest.body._routing) { + throw kerror.get("services", "storage", "no_routing"); + } +} +/** + * Checks if the optional "refresh" argument is well-formed + * + * @param {Object} esRequest + * @throws + */ +function assertWellFormedRefresh(esRequest) { + if (!["wait_for", "false", false, undefined].includes(esRequest.refresh)) { + throw kerror.get("services", "storage", "invalid_argument", "refresh", '"wait_for", false'); + } +} +function getKuid(userId) { + if (!userId) { + return null; + } + return String(userId); +} +/** + * Checks if an index or collection name is valid + * + * @see https://www.elastic.co/guide/en/elasticsearch/reference/7.4/indices-create-index.html + * + * Beware of the length check: ES allows indice names up to 255 bytes, but since + * in Kuzzle we emulate collections as indices, we have to make sure + * that the privacy prefix, the index name, the separator and the collection + * name ALL fit within the 255-bytes limit of Elasticsearch. 
The simplest way + * is to limit index and collection names to 126 bytes and document that + * limitation (prefix(1) + index(1..126) + sep(1) + collection(1..126) = 4..254) + * + * @param {string} name + * @returns {Boolean} + */ +function _isObjectNameValid(name) { + if (typeof name !== "string" || name.length === 0) { + return false; + } + if (name.toLowerCase() !== name) { + return false; + } + if (Buffer.from(name).length > 126) { + return false; + } + if (name === "_all") { + return false; + } + let valid = true; + for (let i = 0; valid && i < FORBIDDEN_CHARS.length; i++) { + valid = !name.includes(FORBIDDEN_CHARS[i]); + } + return valid; +} +//# sourceMappingURL=elasticsearch.js.map \ No newline at end of file diff --git a/lib/service/storage/7/elasticsearch.ts b/lib/service/storage/7/elasticsearch.ts new file mode 100644 index 0000000000..5354a6a1cb --- /dev/null +++ b/lib/service/storage/7/elasticsearch.ts @@ -0,0 +1,3844 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import _ from "lodash"; + +import { ApiResponse, RequestParams, Client } from "sdk-es7"; +import { Index, IndicesCreate } from "sdk-es7/api/requestParams"; +import { TypeMapping } from "sdk-es7/api/types"; +import { + InfoResult, + KRequestBody, + JSONObject, + KImportError, + KRequestParams, +} from "../../../types/storage/7/Elasticsearch"; + +import assert from "assert"; + +import ms from "ms"; +import Bluebird from "bluebird"; +import semver from "semver"; +import debug from "../../../util/debug"; + +import ESWrapper from "./esWrapper"; +import QueryTranslator from "../commons/queryTranslator"; +import didYouMean from "../../../util/didYouMean"; +import * as kerror from "../../../kerror"; +import { assertIsObject } from "../../../util/requestAssertions"; +import { isPlainObject } from "../../../util/safeObject"; +import scopeEnum from "../../../core/storage/storeScopeEnum"; +import extractFields from "../../../util/extractFields"; +import { Mutex } from "../../../util/mutex"; +import { randomNumber } from "../../../util/name-generator"; + +debug("kuzzle:services:elasticsearch"); + +const SCROLL_CACHE_PREFIX = "_docscroll_"; + +const ROOT_MAPPING_PROPERTIES = [ + "properties", + "_meta", + "dynamic", + "dynamic_templates", +]; +const CHILD_MAPPING_PROPERTIES = ["type"]; + +// Used for collection emulation +const HIDDEN_COLLECTION = "_kuzzle_keep"; +const ALIAS_PREFIX = "@"; // @todo next major release: Add ALIAS_PREFIX in FORBIDDEN_CHARS +const PRIVATE_PREFIX = "%"; +const PUBLIC_PREFIX = "&"; +const INDEX_PREFIX_POSITION_IN_INDICE = 0; +const INDEX_PREFIX_POSITION_IN_ALIAS = 1; +const NAME_SEPARATOR = "."; +const FORBIDDEN_CHARS = `\\/*?"<>| \t\r\n,+#:${NAME_SEPARATOR}${PUBLIC_PREFIX}${PRIVATE_PREFIX}`; +const DYNAMIC_PROPERTY_VALUES = ["true", "false", "strict"]; + +// used to check whether we need to wait 
for ES to initialize or not +enum esStateEnum { + AWAITING = 1, + NONE = 2, + OK = 3, +} + +let esState = esStateEnum.NONE; + +/** + * @param {Kuzzle} kuzzle kuzzle instance + * @param {Object} config Service configuration + * @param {storeScopeEnum} scope + * @constructor + */ +export class ES7 { + public _client: Client; + public _scope: scopeEnum; + public _indexPrefix: string; + public _esWrapper: ESWrapper; + public _esVersion: any; + public _translator: QueryTranslator; + public searchBodyKeys: string[]; + public scriptKeys: string[]; + public scriptAllowedArgs: string[]; + public maxScrollDuration: number; + public scrollTTL: number; + public _config: any; + + constructor(config: any, scope = scopeEnum.PUBLIC) { + this._config = config; + this._scope = scope; + this._indexPrefix = + scope === scopeEnum.PRIVATE ? PRIVATE_PREFIX : PUBLIC_PREFIX; + + this._client = null; + this._esWrapper = null; + this._esVersion = null; + this._translator = new QueryTranslator(); + + // Allowed root key of a search query + this.searchBodyKeys = [ + "aggregations", + "aggs", + "collapse", + "explain", + "fields", + "from", + "highlight", + "query", + "search_after", + "search_timeout", + "size", + "sort", + "suggest", + "_name", + "_source", + "_source_excludes", + "_source_includes", + ]; + + /** + * Only allow stored-scripts in queries + */ + this.scriptKeys = ["script", "_script"]; + this.scriptAllowedArgs = ["id", "params"]; + + this.maxScrollDuration = this._loadMsConfig("maxScrollDuration"); + + this.scrollTTL = this._loadMsConfig("defaults.scrollTTL"); + } + + get scope() { + return this._scope; + } + + /** + * Initializes the elasticsearch client + * + * @override + * @returns {Promise} + */ + async _initSequence() { + if (this._client) { + return; + } + + if ( + global.NODE_ENV !== "development" && + this._config.commonMapping.dynamic === "true" + ) { + global.kuzzle.log.warn( + [ + "Your dynamic mapping policy is set to 'true' for new fields.", + "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", + 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', + ].join("\n") + ); + } + + this._client = new Client(this._config.client); + + await this.waitForElasticsearch(); + + this._esWrapper = new ESWrapper(this._client); + + const { + body: { version }, + } = await this._client.info(); + + if ( + version && + !semver.satisfies(semver.coerce(version.number), ">= 7.0.0") + ) { + throw kerror.get( + "services", + "storage", + "version_mismatch", + version.number + ); + } + + this._esVersion = version; + } + + /** + * Translate Koncorde filters to Elasticsearch query + * + * @param {Object} filters - Set of valid Koncorde filters + * @returns {Object} Equivalent Elasticsearch query + */ + translateKoncordeFilters(filters) { + return this._translator.translate(filters); + } + + /** + * Returns some basic information about this service + * @override + * + * @returns {Promise.} service informations + */ + info() { + const result: InfoResult = { + type: "elasticsearch", + version: this._esVersion, + }; + + return this._client + .info() + .then(({ body }) => { + result.version = body.version.number; + result.lucene = body.version.lucene_version; + + return this._client.cluster.health(); + }) + .then(({ body }) => { + result.status = body.status; + + return this._client.cluster.stats({ human: true }); + }) + .then(({ body }) => { + result.spaceUsed = body.indices.store.size; + 
result.nodes = body.nodes; + + return result; + }) + .catch((error) => this._esWrapper.reject(error)); + } + + /** + * Returns detailed multi-level storage stats data + * + * @returns {Promise.} + */ + async stats() { + const esRequest = { + metric: ["docs", "store"], + }; + + const { body } = await this._client.indices.stats(esRequest); + const indexes = {}; + let size = 0; + + for (const [indice, indiceInfo] of Object.entries(body.indices)) { + const infos = indiceInfo as any; + // Ignore non-Kuzzle indices + if ( + !indice.startsWith(PRIVATE_PREFIX) && + !indice.startsWith(PUBLIC_PREFIX) + ) { + continue; + } + + const aliases = await this._getAliasFromIndice(indice); + const alias = aliases[0]; + const indexName = this._extractIndex(alias); + const collectionName = this._extractCollection(alias); + + if ( + alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + collectionName === HIDDEN_COLLECTION + ) { + continue; + } + + if (!indexes[indexName]) { + indexes[indexName] = { + collections: [], + name: indexName, + size: 0, + }; + } + indexes[indexName].collections.push({ + documentCount: infos.total.docs.count, + name: collectionName, + size: infos.total.store.size_in_bytes, + }); + indexes[indexName].size += infos.total.store.size_in_bytes; + size += infos.total.store.size_in_bytes; + } + + return { + indexes: Object.values(indexes), + size, + }; + } + + /** + * Scrolls results from previous elasticsearch query. + * Automatically clears the scroll context after the last result page has + * been fetched. + * + * @param {String} scrollId - Scroll identifier + * @param {Object} options - scrollTTL (default scrollTTL) + * + * @returns {Promise.<{ scrollId, hits, aggregations, total }>} + */ + async scroll(scrollId: string, { scrollTTL }: { scrollTTL?: string } = {}) { + const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; + const esRequest: RequestParams.Scroll> = { + scroll: _scrollTTL, + scroll_id: scrollId, + }; + + const cacheKey = + SCROLL_CACHE_PREFIX + global.kuzzle.hash(esRequest.scroll_id); + + debug("Scroll: %o", esRequest); + + if (_scrollTTL) { + const scrollDuration = ms(_scrollTTL); + + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get( + "services", + "storage", + "scroll_duration_too_great", + _scrollTTL + ); + } + } + + const stringifiedScrollInfo = await global.kuzzle.ask( + "core:cache:internal:get", + cacheKey + ); + + if (!stringifiedScrollInfo) { + throw kerror.get("services", "storage", "unknown_scroll_id"); + } + + const scrollInfo = JSON.parse(stringifiedScrollInfo); + + try { + const { body } = await this._client.scroll(esRequest); + + scrollInfo.fetched += body.hits.hits.length; + + if (scrollInfo.fetched >= body.hits.total.value) { + debug("Last scroll page fetched: deleting scroll %s", body._scroll_id); + await global.kuzzle.ask("core:cache:internal:del", cacheKey); + await this.clearScroll(body._scroll_id); + } else { + await global.kuzzle.ask( + "core:cache:internal:store", + cacheKey, + JSON.stringify(scrollInfo), + { + ttl: ms(_scrollTTL) || this.scrollTTL, + } + ); + } + + body.remaining = body.hits.total.value - scrollInfo.fetched; + + return await this._formatSearchResult(body, scrollInfo); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Searches documents from elasticsearch with a query + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) 
+ * @param {Object} options - from (undefined), size (undefined), scroll (undefined) + * + * @returns {Promise.<{ scrollId, hits, aggregations, suggest, total }>} + */ + async search( + { + index, + collection, + searchBody, + targets, + }: { + index?: string; + collection?: string; + searchBody?: JSONObject; + targets?: any[]; + } = {}, + { + from, + size, + scroll, + }: { + from?: number; + size?: number; + scroll?: string; + } = {} + ) { + let esIndexes: any; + + if (targets && targets.length > 0) { + const indexes = new Set(); + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + + indexes.add(alias); + } + } + + esIndexes = Array.from(indexes).join(","); + } else { + esIndexes = this._getAlias(index, collection); + } + + const esRequest = { + body: this._sanitizeSearchBody(searchBody), + from, + index: esIndexes, + scroll, + size, + trackTotalHits: true, + }; + + if (scroll) { + const scrollDuration = ms(scroll); + + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get( + "services", + "storage", + "scroll_duration_too_great", + scroll + ); + } + } + + debug("Search: %j", esRequest); + + try { + const { body } = await this._client.search(esRequest); + + if (body._scroll_id) { + const ttl = + (esRequest.scroll && ms(esRequest.scroll)) || + ms(this._config.defaults.scrollTTL); + + await global.kuzzle.ask( + "core:cache:internal:store", + SCROLL_CACHE_PREFIX + global.kuzzle.hash(body._scroll_id), + JSON.stringify({ + collection, + fetched: body.hits.hits.length, + index, + targets, + }), + { ttl } + ); + + body.remaining = body.hits.total.value - body.hits.hits.length; + } + + return await this._formatSearchResult(body, { + collection, + index, + targets, + }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Generate a map that associate an alias to a pair of index and collection + * + * @param {*} targets + * @returns + */ + _mapTargetsToAlias(targets) { + const aliasToTargets = {}; + + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + if (!aliasToTargets[alias]) { + aliasToTargets[alias] = { + collection: targetCollection, + index: target.index, + }; + } + } + } + + return aliasToTargets; + } + + async _formatSearchResult(body: any, searchInfo: any = {}) { + let aliasToTargets = {}; + const aliasCache = new Map(); + + if (searchInfo.targets) { + /** + * We need to map the alias to the target index and collection, + * so we can later retrieve informations about an index & collection + * based on its alias. + */ + aliasToTargets = this._mapTargetsToAlias(searchInfo.targets); + } + + const formatHit = async (hit) => { + let index = searchInfo.index; + let collection = searchInfo.collection; + + /** + * If the search has been done on multiple targets, we need to + * retrieve the appropriate index and collection based on the alias + */ + if (hit._index && searchInfo.targets) { + // Caching to reduce call to ES + let aliases = aliasCache.get(hit._index); + if (!aliases) { + // Retrieve all the alias associated to one index + aliases = await this._getAliasFromIndice(hit._index); + aliasCache.set(hit._index, aliases); + } + + /** + * Since multiple alias can point to the same index in ES, we need to + * find the first alias that exists in the map of aliases associated + * to the targets. 
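+           *
+           * e.g. assuming the alias convention defined by the constants above
+           * (ALIAS_PREFIX + scope prefix + index + NAME_SEPARATOR + collection),
+           * a public "myindex"/"mycollection" pair (hypothetical names) is
+           * looked up here under the alias "@&myindex.mycollection".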
+ */ + const alias = aliases.find((_alias) => aliasToTargets[_alias]); + // Retrieve index and collection information based on the matching alias + index = aliasToTargets[alias].index; + collection = aliasToTargets[alias].collection; + } + + return { + _id: hit._id, + _score: hit._score, + _source: hit._source, + collection, + highlight: hit.highlight, + index, + }; + }; + + async function formatInnerHits(innerHits) { + if (!innerHits) { + return undefined; + } + + const formattedInnerHits = {}; + for (const [name, innerHit] of Object.entries(innerHits)) { + formattedInnerHits[name] = await Bluebird.map( + (innerHit as any).hits.hits, + formatHit + ); + } + return formattedInnerHits; + } + + const hits = await Bluebird.map(body.hits.hits, async (hit) => ({ + inner_hits: await formatInnerHits(hit.inner_hits), + ...(await formatHit(hit)), + })); + + return { + aggregations: body.aggregations, + hits, + remaining: body.remaining, + scrollId: body._scroll_id, + suggest: body.suggest, + total: body.hits.total.value, + }; + } + + /** + * Gets the document with given ID + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async get(index, collection, id) { + const esRequest = { + id, + index: this._getAlias(index, collection), + }; + + // Just in case the user make a GET on url /mainindex/test/_search + // Without this test we return something weird: a result.hits.hits with all + // document without filter because the body is empty in HTTP by default + if (esRequest.id === "_search") { + return kerror.reject("services", "storage", "search_as_an_id"); + } + + debug("Get document: %o", esRequest); + + try { + const { body } = await this._client.get(esRequest); + + return { + _id: body._id, + _source: body._source, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Returns the list of documents matching the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mGet(index: string, collection: string, ids: string[]) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + + const esRequest = { + body: { + docs: ids.map((_id) => ({ + _id, + _index: this._getAlias(index, collection), + })), + }, + }; + + debug("Multi-get documents: %o", esRequest); + + let body; + + try { + ({ body } = await this._client.mget(esRequest)); // NOSONAR + } catch (e) { + throw this._esWrapper.formatESError(e); + } + + const errors = []; + const items = []; + + for (const doc of body.docs) { + if (doc.found) { + items.push({ + _id: doc._id, + _source: doc._source, + _version: doc._version, + }); + } else { + errors.push(doc._id); + } + } + + return { errors, items }; + } + + /** + * Counts how many documents match the filter given in body + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) 
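+   *   (e.g. a raw Elasticsearch body such as { query: { match_all: {} } })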
+ * + * @returns {Promise.} count + */ + async count(index: string, collection: string, searchBody = {}) { + const esRequest = { + body: this._sanitizeSearchBody(searchBody), + index: this._getAlias(index, collection), + }; + + debug("Count: %o", esRequest); + + try { + const { body } = await this._client.count(esRequest); + return body.count; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends the new document to elasticsearch + * Cleans data to match elasticsearch specifications + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} content - Document content + * @param {Object} options - id (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { _id, _version, _source } + */ + async create( + index: string, + collection: string, + content: JSONObject, + { + id, + refresh, + userId = null, + injectKuzzleMeta = true, + }: { + id?: string; + refresh?: boolean | "wait_for"; + userId?: string; + injectKuzzleMeta?: boolean; + } = {} + ) { + assertIsObject(content); + + const esRequest: Index> = { + body: content, + id, + index: this._getAlias(index, collection), + op_type: id ? "create" : "index", + refresh, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + // Add metadata + if (injectKuzzleMeta) { + esRequest.body._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }; + } + + debug("Create document: %o", esRequest); + + try { + const { body } = await this._client.index(esRequest); + + return { + _id: body._id, + _source: esRequest.body, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Creates a new document to Elasticsearch, or replace it if it already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null), injectKuzzleMeta (true) + * + * @returns {Promise.} { _id, _version, _source, created } + */ + async createOrReplace( + index, + collection, + id, + content, + { + refresh, + userId = null, + injectKuzzleMeta = true, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + injectKuzzleMeta?: boolean; + } = {} + ) { + const esRequest = { + body: content, + id, + index: this._getAlias(index, collection), + refresh, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + // Add metadata + if (injectKuzzleMeta) { + esRequest.body._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + + debug("Create or replace document: %o", esRequest); + + try { + const { body } = await this._client.index(esRequest); + + return { + _id: body._id, + _source: esRequest.body, + _version: body._version, + created: body.result === "created", // Needed by the notifier + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends the partial document to elasticsearch with the id to update + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async update( + index: 
string, + collection: string, + id: string, + content: JSONObject, + { + refresh, + userId = null, + retryOnConflict, + injectKuzzleMeta = true, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + retryOnConflict?: number; + injectKuzzleMeta?: boolean; + } = {} + ) { + const esRequest: RequestParams.Update> = { + _source: "true", + body: { doc: content }, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + if (injectKuzzleMeta) { + // Add metadata + esRequest.body.doc._kuzzle_info = { + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + + debug("Update document: %o", esRequest); + + try { + const { body } = await this._client.update(esRequest); + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends the partial document to elasticsearch with the id to update + * Creates the document if it doesn't already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - defaultValues ({}), refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async upsert( + index: string, + collection: string, + id: string, + content: JSONObject, + { + defaultValues = {}, + refresh, + userId = null, + retryOnConflict, + injectKuzzleMeta = true, + }: { + defaultValues?: JSONObject; + refresh?: boolean | "wait_for"; + userId?: string; + retryOnConflict?: number; + injectKuzzleMeta?: boolean; + } = {} + ) { + const esRequest: RequestParams.Update> = { + _source: "true", + body: { + doc: content, + upsert: { ...defaultValues, ...content }, + }, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + // Add metadata + const user = getKuid(userId); + const now = Date.now(); + + if (injectKuzzleMeta) { + esRequest.body.doc._kuzzle_info = { + updatedAt: now, + updater: user, + }; + esRequest.body.upsert._kuzzle_info = { + author: user, + createdAt: now, + }; + } + + debug("Upsert document: %o", esRequest); + + try { + const { body } = await this._client.update(esRequest); + + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + created: body.result === "created", + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Replaces a document to Elasticsearch + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async replace( + index: string, + collection: string, + id: string, + content: JSONObject, + { + refresh, + userId = null, + injectKuzzleMeta = true, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + injectKuzzleMeta?: boolean; + } = {} + ) { + const alias = this._getAlias(index, collection); + const esRequest = { + body: content, + id, + index: alias, + refresh, + }; + + assertNoRouting(esRequest); + 
assertWellFormedRefresh(esRequest); + + if (injectKuzzleMeta) { + // Add metadata + esRequest.body._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + + try { + const { body: exists } = await this._client.exists({ id, index: alias }); + + if (!exists) { + throw kerror.get( + "services", + "storage", + "not_found", + id, + index, + collection + ); + } + + debug("Replace document: %o", esRequest); + + const { body } = await this._client.index(esRequest); + + return { + _id: id, + _source: esRequest.body, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends to elasticsearch the document id to delete + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} options - refresh (undefined) + * + * @returns {Promise} + */ + async delete( + index: string, + collection: string, + id: string, + { + refresh, + }: { + refresh?: boolean | "wait_for"; + } = {} + ) { + const esRequest = { + id, + index: this._getAlias(index, collection), + refresh, + }; + + assertWellFormedRefresh(esRequest); + + debug("Delete document: %o", esRequest); + + try { + await this._client.delete(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + return null; + } + + /** + * Deletes all documents matching the provided filters. + * If fetch=false, the max documents write limit is not applied. + * + * Options: + * - size: size of the batch to retrieve documents (no-op if fetch=false) + * - refresh: refresh option for ES + * - fetch: if true, will fetch the documents before delete them + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} options - size (undefined), refresh (undefined), fetch (true) + * + * @returns {Promise.<{ documents, total, deleted, failures: Array<{ _shardId, reason }> }>} + */ + async deleteByQuery( + index: string, + collection: string, + query: JSONObject, + { + refresh, + size = 1000, + fetch = true, + }: { + refresh?: boolean | "wait_for"; + size?: number; + fetch?: boolean; + } = {} + ) { + const esRequest: RequestParams.DeleteByQuery> = { + body: this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + size, + }; + + if (!isPlainObject(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + + try { + let documents = []; + + if (fetch) { + documents = await this._getAllDocumentsFromQuery(esRequest); + } + + debug("Delete by query: %o", esRequest); + + esRequest.refresh = refresh === "wait_for" ? 
true : refresh; + + const { body } = await this._client.deleteByQuery(esRequest); + + return { + deleted: body.deleted, + documents, + failures: body.failures.map(({ shardId, reason }) => ({ + reason, + shardId, + })), + total: body.total, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Delete fields of a document and replace it + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Array} fields - Document fields to be removed + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async deleteFields( + index: string, + collection: string, + id: string, + fields: string, + { + refresh, + userId = null, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection); + const esRequest = { + id, + index: alias, + }; + + try { + debug("DeleteFields document: %o", esRequest); + const { body } = await this._client.get(esRequest); + + for (const field of fields) { + if (_.has(body._source, field)) { + _.set(body._source, field, undefined); + } + } + + body._source._kuzzle_info = { + ...body._source._kuzzle_info, + updatedAt: Date.now(), + updater: getKuid(userId), + }; + + const newEsRequest = { + body: body._source, + id, + index: alias, + refresh, + }; + + assertNoRouting(newEsRequest); + assertWellFormedRefresh(newEsRequest); + + const { body: updated } = await this._client.index(newEsRequest); + + return { + _id: id, + _source: body._source, + _version: updated._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined), size (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async updateByQuery( + index: string, + collection: string, + query: JSONObject, + changes: JSONObject, + { + refresh, + size = 1000, + userId = null, + }: { + refresh?: boolean | "wait_for"; + size?: number; + userId?: string; + } = {} + ) { + try { + const esRequest = { + body: this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + size, + }; + + const documents = await this._getAllDocumentsFromQuery(esRequest); + + for (const document of documents) { + document._source = undefined; + document.body = changes; + } + + debug("Update by query: %o", esRequest); + + const { errors, items } = await this.mUpdate( + index, + collection, + documents, + { refresh, userId } + ); + + return { + errors, + successes: items, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async bulkUpdateByQuery( + index: string, + collection: string, + query: JSONObject, + changes: 
JSONObject, + { + refresh = false, + }: { + refresh?: boolean; + } = {} + ) { + const script = { + params: {}, + source: "", + }; + + const flatChanges = extractFields(changes, { alsoExtractValues: true }); + + for (const { key, value } of flatChanges) { + script.source += `ctx._source.${key} = params['${key}'];`; + script.params[key] = value; + } + + const esRequest: RequestParams.UpdateByQuery> = { + body: { + query: this._sanitizeSearchBody({ query }).query, + script, + }, + index: this._getAlias(index, collection), + refresh, + }; + + debug("Bulk Update by query: %o", esRequest); + + let response; + + try { + response = await this._client.updateByQuery(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + if (response.body.failures.length) { + const errors = response.body.failures.map(({ shardId, reason }) => ({ + reason, + shardId, + })); + + throw kerror.get( + "services", + "storage", + "incomplete_update", + response.body.updated, + errors + ); + } + + return { + updated: response.body.updated, + }; + } + + /** + * Execute the callback with a batch of documents of specified size until all + * documents matched by the query have been processed. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Function} callback - callback that will be called with the "hits" array + * @param {Object} options - size (10), scrollTTL ('5s') + * + * @returns {Promise.} Array of results returned by the callback + */ + async mExecute( + index: string, + collection: string, + query: JSONObject, + callback: any, + { + size = 10, + scrollTTl = "5s", + }: { + size?: number; + scrollTTl?: string; + } = {} + ): Promise { + const esRequest: RequestParams.Search = { + body: this._sanitizeSearchBody({ query }), + from: 0, + index: this._getAlias(index, collection), + scroll: scrollTTl, + size, + }; + + if (!isPlainObject(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + + const client = this._client; + let results = []; + + let processed = 0; + let scrollId = null; + + try { + results = await new Bluebird((resolve, reject) => { + this._client.search( + esRequest, + async function getMoreUntilDone( + error, + { body: { hits, _scroll_id } } + ) { + if (error) { + reject(error); + return; + } + + scrollId = _scroll_id; + + const ret = callback(hits.hits); + + results.push(await ret); + processed += hits.hits.length; + + if (hits.total.value !== processed) { + client.scroll( + { + scroll: esRequest.scroll, + scroll_id: _scroll_id, + }, + getMoreUntilDone + ); + } else { + resolve(results); + } + } + ); + }); + } finally { + this.clearScroll(scrollId); + } + + return results; + } + + /** + * Creates a new index. + * + * This methods creates an hidden collection in the provided index to be + * able to list it. + * This methods resolves if the index name does not already exists either as + * private or public index. 
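+   * (the hidden collection created for that purpose is HIDDEN_COLLECTION,
+   * i.e. "_kuzzle_keep")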
+ * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async createIndex(index: string) { + this._assertValidIndexAndCollection(index); + + let body: ApiResponse>["body"]; + + try { + body = (await this._client.cat.aliases({ format: "json" })).body; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias: name }) => name); + for (const alias of aliases) { + const indexName = this._extractIndex(alias); + + if (index === indexName) { + const indexType = + alias[INDEX_PREFIX_POSITION_IN_ALIAS] === PRIVATE_PREFIX + ? "private" + : "public"; + + throw kerror.get( + "services", + "storage", + "index_already_exists", + indexType, + index + ); + } + } + + await this._createHiddenCollection(index); + + return null; + } + + /** + * Creates an empty collection. + * Mappings and settings will be applied if supplied. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async createCollection( + index: string, + collection: string, + { + mappings = {}, + settings = {}, + }: { mappings?: TypeMapping; settings?: Record } = {} + ) { + this._assertValidIndexAndCollection(index, collection); + + if (collection === HIDDEN_COLLECTION) { + throw kerror.get( + "services", + "storage", + "collection_reserved", + HIDDEN_COLLECTION + ); + } + + const mutex = new Mutex(`hiddenCollection/create/${index}`); + try { + await mutex.lock(); + + if (await this._hasHiddenCollection(index)) { + await this.deleteCollection(index, HIDDEN_COLLECTION); + } + } catch (error) { + throw this._esWrapper.formatESError(error); + } finally { + await mutex.unlock(); + } + + const esRequest: RequestParams.IndicesCreate> = { + body: { + aliases: { + [this._getAlias(index, collection)]: {}, + }, + mappings: {}, + settings, + }, + index: await this._getAvailableIndice(index, collection), + wait_for_active_shards: await this._getWaitForActiveShards(), + }; + + this._checkDynamicProperty(mappings); + + const exists = await this.hasCollection(index, collection); + if (exists) { + return this.updateCollection(index, collection, { mappings, settings }); + } + + this._checkMappings(mappings); + + esRequest.body.mappings = { + _meta: mappings._meta || this._config.commonMapping._meta, + dynamic: mappings.dynamic || this._config.commonMapping.dynamic, + properties: _.merge( + mappings.properties, + this._config.commonMapping.properties + ), + }; + + esRequest.body.settings.number_of_replicas = + esRequest.body.settings.number_of_replicas || + this._config.defaultSettings.number_of_replicas; + + esRequest.body.settings.number_of_shards = + esRequest.body.settings.number_of_shards || + this._config.defaultSettings.number_of_shards; + + try { + await this._client.indices.create(esRequest); + } catch (error) { + if ( + _.get(error, "meta.body.error.type") === + "resource_already_exists_exception" + ) { + // race condition: the indice has been created between the "exists" + // check above and this "create" attempt + return null; + } + + throw this._esWrapper.formatESError(error); + } + + return null; + } + + /** + * Retrieves settings definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.<{ settings }>} + */ + async getSettings(index: string, collection: string) { + const indice = await this._getIndice(index, collection); + const esRequest: 
RequestParams.IndicesGetSettings = { + index: indice, + }; + + debug("Get settings: %o", esRequest); + + try { + const { body } = await this._client.indices.getSettings(esRequest); + + return body[indice].settings.index; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Retrieves mapping definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} options - includeKuzzleMeta (false) + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async getMapping( + index: string, + collection: string, + { + includeKuzzleMeta = false, + }: { + includeKuzzleMeta?: boolean; + } = {} + ) { + const indice = await this._getIndice(index, collection); + const esRequest = { + index: indice, + }; + + debug("Get mapping: %o", esRequest); + + try { + const { body } = await this._client.indices.getMapping(esRequest); + + const properties = includeKuzzleMeta + ? body[indice].mappings.properties + : _.omit(body[indice].mappings.properties, "_kuzzle_info"); + + return { + _meta: body[indice].mappings._meta, + dynamic: body[indice].mappings.dynamic, + properties, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Updates a collection mappings and settings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async updateCollection( + index: string, + collection: string, + { + mappings = {}, + settings = {}, + }: { mappings?: TypeMapping; settings?: Record } = {} + ) { + const esRequest = { + index: await this._getIndice(index, collection), + }; + + // If either the putMappings or the putSettings operation fail, we need to + // rollback the whole operation. Since mappings can't be rollback, we try to + // update the settings first, then the mappings and we rollback the settings + // if putMappings fail. + let indexSettings; + + try { + indexSettings = await this._getSettings(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + if (!_.isEmpty(settings)) { + await this.updateSettings(index, collection, settings); + } + + try { + if (!_.isEmpty(mappings)) { + const previousMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + + await this.updateMapping(index, collection, mappings); + + if (this._dynamicChanges(previousMappings, mappings)) { + await this.updateSearchIndex(index, collection); + } + } + } catch (error) { + const allowedSettings = this.getAllowedIndexSettings(indexSettings); + + // Rollback to previous settings + if (!_.isEmpty(settings)) { + await this.updateSettings(index, collection, allowedSettings); + } + + throw error; + } + + return null; + } + + /** + * Given index settings we return a new version of index settings + * only with allowed settings that can be set (during update or create index). + * @param indexSettings the index settings + * @returns {{index: *}} a new index settings with only allowed settings. 
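+   * (read-only keys such as "creation_date", "provided_name", "uuid" and
+   * "version" are stripped, as they presumably cannot be re-applied when
+   * updating or re-creating an index)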
+ */ + getAllowedIndexSettings(indexSettings) { + return { + index: _.omit(indexSettings.index, [ + "creation_date", + "provided_name", + "uuid", + "version", + ]), + }; + } + + /** + * Sends an empty UpdateByQuery request to update the search index + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @returns {Promise.} {} + */ + async updateSearchIndex(index: string, collection: string) { + const esRequest: RequestParams.UpdateByQuery> = { + body: {}, + // @cluster: conflicts when two nodes start at the same time + conflicts: "proceed", + index: this._getAlias(index, collection), + refresh: true, + // This operation can take some time: this should be an ES + // background task. And it's preferable to a request timeout when + // processing large indexes. + wait_for_completion: false, + }; + + debug("UpdateByQuery: %o", esRequest); + + try { + await this._client.updateByQuery(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Update a collection mappings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} mappings - Collection mappings in ES format + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async updateMapping( + index: string, + collection: string, + mappings: TypeMapping = {} + ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { + const esRequest: RequestParams.IndicesPutMapping> = { + body: {}, + index: this._getAlias(index, collection), + }; + + this._checkDynamicProperty(mappings); + + const collectionMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + + this._checkMappings(mappings); + + esRequest.body = { + _meta: mappings._meta || collectionMappings._meta, + dynamic: mappings.dynamic || collectionMappings.dynamic, + properties: mappings.properties, + }; + + debug("Update mapping: %o", esRequest); + + try { + await this._client.indices.putMapping(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const fullProperties = _.merge( + collectionMappings.properties, + mappings.properties + ); + + return { + _meta: esRequest.body._meta, + dynamic: esRequest.body.dynamic, + properties: fullProperties, + }; + } + + /** + * Updates a collection settings (eg: analyzers) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} settings - Collection settings in ES format + * + * @returns {Promise} + */ + async updateSettings(index, collection, settings = {}) { + const esRequest = { + index: this._getAlias(index, collection), + }; + + await this._client.indices.close(esRequest); + + try { + await this._client.indices.putSettings({ ...esRequest, body: settings }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } finally { + await this._client.indices.open(esRequest); + } + + return null; + } + + /** + * Empties the content of a collection. Keep the existing mapping and settings. 
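+   * (implemented below by deleting the underlying indice and re-creating it
+   * with the same alias, mappings and allowed settings)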
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise} + */ + async truncateCollection(index: string, collection: string) { + let mappings; + let settings; + + const esRequest = { + index: await this._getIndice(index, collection), + }; + + try { + mappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + settings = await this._getSettings(esRequest); + settings = { + ...settings, + ...this.getAllowedIndexSettings(settings), + }; + await this._client.indices.delete(esRequest); + + await this._client.indices.create({ + ...esRequest, + body: { + aliases: { + [this._getAlias(index, collection)]: {}, + }, + mappings, + settings, + }, + wait_for_active_shards: await this._getWaitForActiveShards(), + }); + + return null; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Runs several action and document + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents to import + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.<{ items, errors }> + */ + async import( + index: string, + collection: string, + documents: JSONObject[], + { + refresh, + timeout, + userId = null, + }: { + refresh?: boolean | "wait_for"; + timeout?: string; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection); + const dateNow = Date.now(); + const esRequest = { + body: documents, + refresh, + timeout, + }; + const kuzzleMeta = { + created: { + author: getKuid(userId), + createdAt: dateNow, + updatedAt: null, + updater: null, + }, + updated: { + updatedAt: dateNow, + updater: getKuid(userId), + }, + }; + + assertWellFormedRefresh(esRequest); + this._scriptCheck(documents); + + this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta); + + let response: Record; + + try { + response = await this._client.bulk(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const body = response.body; + + const result = { + errors: [], + items: [], + }; + + let idx = 0; + + /** + * @warning Critical code section + * + * bulk body can contain more than 10K elements + */ + for (let i = 0; i < body.items.length; i++) { + const row = body.items[i]; + const action = Object.keys(row)[0]; + const item = row[action]; + + if (item.status >= 400) { + const error: KImportError = { + _id: item._id, + status: item.status, + }; + + // update action contain body in "doc" field + // the delete action is not followed by an action payload + if (action === "update") { + error._source = documents[idx + 1].doc; + error._source._kuzzle_info = undefined; + } else if (action !== "delete") { + error._source = documents[idx + 1]; + error._source._kuzzle_info = undefined; + } + + // ES response does not systematicaly include an error object + // (e.g. delete action with 404 status) + if (item.error) { + error.error = { + reason: item.error.reason, + type: item.error.type, + }; + } + + result.errors.push({ [action]: error }); + } else { + result.items.push({ + [action]: { + _id: item._id, + status: item.status, + }, + }); + } + + // the delete action is not followed by an action payload + idx = action === "delete" ? 
idx + 1 : idx + 2; + } + /* end critical code section */ + + return result; + } + + /** + * Retrieves the complete list of existing collections in the current index + * + * @param {String} index - Index name + * @param {Object.Boolean} includeHidden - Optional: include HIDDEN_COLLECTION in results + * + * @returns {Promise.} Collection names + */ + async listCollections(index, { includeHidden = false } = {}) { + let body; + + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias }) => alias); + + const schema = this._extractSchema(aliases, { includeHidden }); + + return schema[index] || []; + } + + /** + * Retrieves the complete list of indexes + * + * @returns {Promise.} Index names + */ + async listIndexes() { + let body: ApiResponse["body"]; + + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias }) => alias); + + const schema = this._extractSchema(aliases); + + return Object.keys(schema); + } + + /** + * Returns an object containing the list of indexes and collections + * + * @returns {Object.} Object + */ + async getSchema() { + let body: ApiResponse["body"]; + + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias }) => alias); + + const schema = this._extractSchema(aliases, { includeHidden: true }); + + for (const [index, collections] of Object.entries(schema)) { + schema[index] = (collections as string[]).filter( + (c) => c !== HIDDEN_COLLECTION + ); + } + + return schema; + } + + /** + * Retrieves the complete list of aliases + * + * @returns {Promise.} [ { alias, index, collection, indice } ] + */ + async listAliases() { + let body; + + try { + ({ body } = await this._client.cat.aliases({ format: "json" })); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = []; + + for (const { alias, index: indice } of body) { + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix) { + aliases.push({ + alias, + collection: this._extractCollection(alias), + index: this._extractIndex(alias), + indice, + }); + } + } + return aliases; + } + + /** + * Deletes a collection + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise} + */ + async deleteCollection(index: string, collection: string): Promise { + const indice = await this._getIndice(index, collection); + const esRequest: RequestParams.IndicesDelete = { + index: indice, + }; + + try { + await this._client.indices.delete(esRequest); + const alias = this._getAlias(index, collection); + + if (await this._checkIfAliasExists(alias)) { + await this._client.indices.deleteAlias({ + index: indice, + name: alias, + }); + } + + await this._createHiddenCollection(index); + } catch (e) { + throw this._esWrapper.formatESError(e); + } + + return null; + } + + /** + * Deletes multiple indexes + * + * @param {String[]} indexes - Index names + * + * @returns {Promise.} + */ + async deleteIndexes(indexes: string[] = []) { + if (indexes.length === 0) { + return Bluebird.resolve([]); + } + const deleted = new Set(); + + try { + const { body } = await this._client.cat.aliases({ format: "json" }); + + const esRequest = body.reduce( + (request, { alias, index: indice }) => { + 
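+          // Keep only indices whose alias matches this store's scope prefix
+          // and whose logical index name is part of the requested "indexes"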
const index = this._extractIndex(alias); + + if ( + alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + !indexes.includes(index) + ) { + return request; + } + + deleted.add(index); + request.index.push(indice); + + return request; + }, + { index: [] } + ); + + if (esRequest.index.length === 0) { + return []; + } + + debug("Delete indexes: %o", esRequest); + + await this._client.indices.delete(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + return Array.from(deleted); + } + + /** + * Deletes an index + * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async deleteIndex(index: string): Promise { + await this.deleteIndexes([index]); + + return null; + } + + /** + * Forces a refresh on the collection. + * + * /!\ Can lead to some performance issues. + * cf https://www.elastic.co/guide/en/elasticsearch/guide/current/near-real-time.html for more details + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} { _shards } + */ + async refreshCollection(index: string, collection: string) { + const esRequest: RequestParams.IndicesRefresh = { + index: this._getAlias(index, collection), + }; + + let _shards: any; + + try { + ({ + body: { _shards }, + } = await this._client.indices.refresh(esRequest)); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + return { _shards }; + } + + /** + * Returns true if the document exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.} + */ + async exists( + index: string, + collection: string, + id: string + ): Promise { + const esRequest: RequestParams.Exists = { + id, + index: this._getAlias(index, collection), + }; + + try { + const { body: exists } = await this._client.exists(esRequest); + + return exists; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Returns the list of documents existing with the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. 
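+   * (unlike mGet, "items" here only contains the ids of the documents that exist)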
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mExists(index: string, collection: string, ids: string[]) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + + const esRequest: RequestParams.Mget = { + _source: "false", + body: { + docs: ids.map((_id) => ({ _id })), + }, + index: this._getAlias(index, collection), + }; + + debug("mExists: %o", esRequest); + + let body; + + try { + ({ body } = await this._client.mget(esRequest)); // NOSONAR + } catch (e) { + throw this._esWrapper.formatESError(e); + } + + const errors = []; + const items = []; + + for (let i = 0; i < body.docs.length; i++) { + const doc = body.docs[i]; + + if (doc.found) { + items.push(doc._id); + } else { + errors.push(doc._id); + } + } + + return { errors, items }; + } + + /** + * Returns true if the index exists + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async hasIndex(index: string): Promise { + const indexes = await this.listIndexes(); + + return indexes.some((idx) => idx === index); + } + + /** + * Returns true if the collection exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} + */ + async hasCollection(index: string, collection: string): Promise { + const collections = await this.listCollections(index); + + return collections.some((col: string) => col === collection); + } + + /** + * Returns true if the index has the hidden collection + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async _hasHiddenCollection(index) { + const collections = await this.listCollections(index, { + includeHidden: true, + }); + + return collections.some((col) => col === HIDDEN_COLLECTION); + } + + /** + * Creates multiple documents at once. + * If a content has no id, one is automatically generated and assigned to it. + * If a content has a specified identifier, it is rejected if it already exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mCreate( + index: string, + collection: string, + documents: JSON[], + { + refresh, + timeout, + userId = null, + }: { + refresh?: boolean | "wait_for"; + timeout?: string; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection), + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, + { rejected, extractedDocuments, documentsToGet } = + this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); + + // prepare the mget request, but only for document having a specified id + const { body } = + documentsToGet.length > 0 + ? 
await this._client.mget({ + body: { docs: documentsToGet }, + index: alias, + }) + : { body: { docs: [] } }; + + const existingDocuments = body.docs; + const esRequest = { + body: [], + index: alias, + refresh, + timeout, + }; + const toImport = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0, idx = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + + // Documents are retrieved in the same order than we got them from user + if (typeof document._id === "string" && existingDocuments[idx]) { + if (existingDocuments[idx].found) { + document._source._kuzzle_info = undefined; + + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document already exists", + status: 400, + }); + } else { + esRequest.body.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.body.push(document._source); + + toImport.push(document); + } + idx++; + } else { + esRequest.body.push({ index: { _index: alias } }); + esRequest.body.push(document._source); + + toImport.push(document); + } + } + /* end critical code section */ + + return this._mExecute(esRequest, toImport, rejected); + } + + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null), injectKuzzleMeta (false), limits (true) + * + * @returns {Promise.<{ items, errors }> + */ + async mCreateOrReplace( + index: string, + collection: string, + documents: JSONObject[], + { + refresh, + timeout, + userId = null, + injectKuzzleMeta = true, + limits = true, + source = true, + }: KRequestParams = {} + ) { + let kuzzleMeta = {}; + + if (injectKuzzleMeta) { + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }; + } + + const alias = this._getAlias(index, collection); + const esRequest = { + body: [], + index: alias, + refresh, + timeout, + }; + const { rejected, extractedDocuments } = this._extractMDocuments( + documents, + kuzzleMeta + ); + + esRequest.body = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.body.push({ + index: { + _id: extractedDocuments[i]._id, + _index: alias, + }, + }); + esRequest.body.push(extractedDocuments[i]._source); + } + /* end critical code section */ + + return this._mExecute(esRequest, extractedDocuments, rejected, { + limits, + source, + }); + } + + /** + * Updates multiple documents with one request + * Replacements are rejected if targeted documents do not exist + * (like with the normal "update" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), retryOnConflict (0), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mUpdate( + index: string, + collection: string, + documents: JSONObject[], + { + refresh = undefined, + retryOnConflict = 0, + timeout = undefined, + userId = null, + } = {} + ) { + const alias = this._getAlias(index, collection), + toImport = [], + esRequest = { + body: [], + index: alias, + refresh, + timeout, + }, + kuzzleMeta = { + _kuzzle_info: { + 
updatedAt: Date.now(), + updater: getKuid(userId), + }, + }, + { rejected, extractedDocuments } = this._extractMDocuments( + documents, + kuzzleMeta + ); + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const extractedDocument = extractedDocuments[i]; + + if (typeof extractedDocument._id === "string") { + esRequest.body.push({ + update: { + _id: extractedDocument._id, + _index: alias, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }); + + // _source: true => makes ES return the updated document source in the + // response. Required by the real-time notifier component + esRequest.body.push({ + _source: true, + doc: extractedDocument._source, + }); + toImport.push(extractedDocument); + } else { + extractedDocument._source._kuzzle_info = undefined; + + rejected.push({ + document: { + _id: extractedDocument._id, + body: extractedDocument._source, + }, + reason: "document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + + const response = await this._mExecute(esRequest, toImport, rejected); + + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + status: item.status, + })); + + return response; + } + + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - refresh (undefined), retryOnConflict (0), timeout (undefined), userId (null) + * + * @returns {Promise.<{ items, errors }> + */ + async mUpsert( + index: string, + collection: string, + documents: JSONObject[], + { + refresh, + retryOnConflict = 0, + timeout, + userId = null, + }: { + refresh?: boolean | "wait_for"; + retryOnConflict?: number; + timeout?: string; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection); + const esRequest = { + body: [], + refresh, + timeout, + }; + + const user = getKuid(userId); + const now = Date.now(); + const kuzzleMeta = { + doc: { + _kuzzle_info: { + updatedAt: now, + updater: user, + }, + }, + upsert: { + _kuzzle_info: { + author: user, + createdAt: now, + }, + }, + }; + + const { rejected, extractedDocuments } = this._extractMDocuments( + documents, + kuzzleMeta, + { + prepareMUpsert: true, + requireId: true, + } + ); + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.body.push( + { + update: { + _id: extractedDocuments[i]._id, + _index: alias, + _source: true, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }, + { + doc: extractedDocuments[i]._source.changes, + upsert: extractedDocuments[i]._source.default, + } + ); + // _source: true + // Makes ES return the updated document source in the response. 
+ // Required by the real-time notifier component + } + /* end critical code section */ + + const response = await this._mExecute( + esRequest, + extractedDocuments, + rejected + ); + + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + created: item.result === "created", // Needed by the notifier + status: item.status, + })); + + return response; + } + + /** + * Replaces multiple documents at once. + * Replacements are rejected if targeted documents do not exist + * (like with the normal "replace" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mReplace( + index: string, + collection: string, + documents: JSONObject[], + { + refresh, + timeout, + userId = null, + }: { + refresh?: boolean | "wait_for"; + timeout?: string; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection), + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, + { rejected, extractedDocuments, documentsToGet } = + this._extractMDocuments(documents, kuzzleMeta, { + prepareMGet: true, + requireId: true, + }); + + if (documentsToGet.length < 1) { + return { errors: rejected, items: [] }; + } + + const { body } = await this._client.mget({ + body: { docs: documentsToGet }, + index: alias, + }); + + const existingDocuments = body.docs; + const esRequest = { + body: [], + refresh, + timeout, + }; + const toImport = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + + // Documents are retrieved in the same order than we got them from user + if (existingDocuments[i]?.found) { + esRequest.body.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.body.push(document._source); + + toImport.push(document); + } else { + document._source._kuzzle_info = undefined; + + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + + return this._mExecute(esRequest, toImport, rejected); + } + + /** + * Deletes multiple documents with one request + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Documents IDs + * @param {Object} options - timeout (undefined), refresh (undefined) + * + * @returns {Promise.<{ documents, errors }> + */ + async mDelete( + index: string, + collection: string, + ids: string[], + { + refresh, + }: { + refresh?: boolean | "wait_for"; + timeout?: number; + } = {} + ) { + const query = { ids: { values: [] } }; + const validIds = []; + const partialErrors = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < ids.length; i++) { + const _id = ids[i]; + + if (typeof _id === "string") { + validIds.push(_id); + } else { + partialErrors.push({ + _id, + reason: 
"document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + await this.refreshCollection(index, collection); + + const { items } = await this.mGet(index, collection, validIds); + + let idx = 0; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < validIds.length; i++) { + const validId = validIds[i]; + const item = items[idx]; + + if (item && item._id === validId) { + query.ids.values.push(validId); + idx++; + } else { + partialErrors.push({ + _id: validId, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + + // @todo duplicated query to get documents body, mGet here and search in + // deleteByQuery + const { documents } = await this.deleteByQuery(index, collection, query, { + refresh, + }); + + return { documents, errors: partialErrors }; + } + + /** + * Executes an ES request prepared by mcreate, mupdate, mreplace, mdelete or mwriteDocuments + * Returns a standardized ES response object, containing the list of + * successfully performed operations, and the rejected ones + * + * @param {Object} esRequest - Elasticsearch request + * @param {Object[]} documents - Document sources (format: {_id, _source}) + * @param {Object[]} partialErrors - pre-rejected documents + * @param {Object} options - limits (true) + * + * @returns {Promise.} results + */ + async _mExecute( + esRequest: RequestParams.Bulk, + documents: JSONObject[], + partialErrors: JSONObject[] = [], + { limits = true, source = true } = {} + ) { + assertWellFormedRefresh(esRequest); + + if (this._hasExceededLimit(limits, documents)) { + return kerror.reject("services", "storage", "write_limit_exceeded"); + } + + let response = { body: { items: [] } }; + + if (documents.length > 0) { + try { + response = await this._client.bulk(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + const body = response.body; + const successes = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < body.items.length; i++) { + const item = body.items[i]; + const result = item[Object.keys(item)[0]]; + + if (result.status >= 400) { + if (result.status === 404) { + partialErrors.push({ + document: { + _id: documents[i]._id, + body: documents[i]._source, + }, + reason: "document not found", + status: result.status, + }); + } else { + partialErrors.push({ + document: documents[i], + reason: result.error.reason, + status: result.status, + }); + } + } else { + successes.push({ + _id: result._id, + _source: source ? 
documents[i]._source : undefined, + _version: result._version, + created: result.result === "created", + get: result.get, + result: result.result, + status: result.status, // used by mUpdate to get the full document body + }); + } + } + /* end critical code section */ + + return { + errors: partialErrors, // @todo rename items to documents + items: successes, + }; + } + + /** + * Extracts, injects metadata and validates documents contained + * in a Request + * + * Used by mCreate, mUpdate, mUpsert, mReplace and mCreateOrReplace + * + * @param {Object[]} documents - Documents + * @param {Object} metadata - Kuzzle metadata + * @param {Object} options - prepareMGet (false), requireId (false) + * + * @returns {Object} { rejected, extractedDocuments, documentsToGet } + */ + _extractMDocuments( + documents: JSONObject[], + metadata: JSONObject, + { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} + ) { + const rejected = []; + const extractedDocuments = []; + const documentsToGet = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < documents.length; i++) { + const document = documents[i]; + + if (!isPlainObject(document.body) && !prepareMUpsert) { + rejected.push({ + document, + reason: "document body must be an object", + status: 400, + }); + } else if (!isPlainObject(document.changes) && prepareMUpsert) { + rejected.push({ + document, + reason: "document changes must be an object", + status: 400, + }); + } else if ( + prepareMUpsert && + document.default && + !isPlainObject(document.default) + ) { + rejected.push({ + document, + reason: "document default must be an object", + status: 400, + }); + } else if (requireId && typeof document._id !== "string") { + rejected.push({ + document, + reason: "document _id must be a string", + status: 400, + }); + } else { + this._processExtract( + prepareMUpsert, + prepareMGet, + metadata, + document, + extractedDocuments, + documentsToGet + ); + } + } + /* end critical code section */ + + return { documentsToGet, extractedDocuments, rejected }; + } + + private _hasExceededLimit(limits: boolean, documents: JSONObject[]) { + return ( + limits && + documents.length > global.kuzzle.config.limits.documentsWriteCount + ); + } + + private _processExtract( + prepareMUpsert: boolean, + prepareMGet: boolean, + metadata: JSONObject, + document: JSONObject, + extractedDocuments: JSONObject[], + documentsToGet: JSONObject[] + ) { + let extractedDocument; + + if (prepareMUpsert) { + extractedDocument = { + _source: { + // Do not use destructuring, it's 10x slower + changes: Object.assign({}, metadata.doc, document.changes), + default: Object.assign( + {}, + metadata.upsert, + document.changes, + document.default + ), + }, + }; + } else { + extractedDocument = { + // Do not use destructuring, it's 10x slower + _source: Object.assign({}, metadata, document.body), + }; + } + + if (document._id) { + extractedDocument._id = document._id; + } + + extractedDocuments.push(extractedDocument); + + if (prepareMGet && typeof document._id === "string") { + documentsToGet.push({ + _id: document._id, + _source: false, + }); + } + } + + /** + * Throws an error if the provided mapping is invalid + * + * @param {Object} mapping + * @throws + */ + _checkMappings(mapping: JSONObject, path = [], check = true) { + const properties = Object.keys(mapping); + const mappingProperties = + path.length === 0 + ? 
ROOT_MAPPING_PROPERTIES + : [...ROOT_MAPPING_PROPERTIES, ...CHILD_MAPPING_PROPERTIES]; + + for (const property of properties) { + if (check && !mappingProperties.includes(property)) { + const currentPath = [...path, property].join("."); + + throw kerror.get( + "services", + "storage", + "invalid_mapping", + currentPath, + didYouMean(property, mappingProperties) + ); + } + + if (property === "properties") { + // type definition level, we don't check + this._checkMappings(mapping[property], [...path, "properties"], false); + } else if (mapping[property]?.properties) { + // root properties level, check for "properties", "dynamic" and "_meta" + this._checkMappings(mapping[property], [...path, property], true); + } + } + } + + /** + * Given index + collection, returns the associated alias name. + * Prefer this function to `_getIndice` and `_getAvailableIndice` whenever it is possible. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Alias name (eg: '@&nepali.liia') + */ + _getAlias(index, collection) { + return `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + } + + /** + * Given an alias name, returns the associated index name. + */ + async _checkIfAliasExists(aliasName) { + const { body } = await this._client.indices.existsAlias({ + name: aliasName, + }); + return body; + } + + /** + * Given index + collection, returns the associated indice name. + * Use this function if ES does not accept aliases in the request. Otherwise use `_getAlias`. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Indice name (eg: '&nepali.liia') + * @throws If there is not exactly one indice associated + */ + async _getIndice(index: string, collection: string): Promise { + const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + const { body } = await this._client.cat.aliases({ + format: "json", + name: alias, + }); + + if (body.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } else if (body.length > 1) { + throw kerror.get( + "services", + "storage", + "multiple_indice_alias", + `"alias" starting with "${ALIAS_PREFIX}"`, + '"indices"' + ); + } + + return body[0].index; + } + + /** + * Given an ES Request returns the settings of the corresponding indice. + * + * @param esRequest the ES Request with wanted settings. + * @return {Promise<*>} the settings of the indice. + * @private + */ + async _getSettings( + esRequest: RequestParams.IndicesGetSettings + ): Promise { + const response = await this._client.indices.getSettings(esRequest); + const index = esRequest.index as string; + + return response.body[index].settings; + } + + /** + * Given index + collection, returns an available indice name. + * Use this function when creating the associated indice. Otherwise use `_getAlias`. 
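+ *
+ * @example
+ * // Minimal usage sketch (not part of the original patch); assumes the
+ * // indice '&nepali.liia' already exists and randomNumber(100000) yields 24395:
+ * // await this._getAvailableIndice("nepali", "liia"); // => '&nepali.liia.24395'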
+ * + * @param {String} index + * @param {String} collection + * + * @returns {String} Available indice name (eg: '&nepali.liia2') + */ + async _getAvailableIndice( + index: string, + collection: string + ): Promise { + let indice = this._getAlias(index, collection).substring( + INDEX_PREFIX_POSITION_IN_ALIAS + ); + + if (!(await this._client.indices.exists({ index: indice })).body) { + return indice; + } + + let notAvailable; + let suffix; + do { + suffix = `.${randomNumber(100000)}`; + + const overflow = Buffer.from(indice + suffix).length - 255; + if (overflow > 0) { + const indiceBuffer = Buffer.from(indice); + indice = indiceBuffer + .subarray(0, indiceBuffer.length - overflow) + .toString(); + } + + const response = await this._client.indices.exists({ + index: indice + suffix, + }); + + notAvailable = response.body; + } while (notAvailable); + + return indice + suffix; + } + + /** + * Given an indice, returns the associated alias name. + * + * @param {String} indice + * + * @returns {String} Alias name (eg: '@&nepali.liia') + * @throws If there is not exactly one alias associated that is prefixed with @ + */ + async _getAliasFromIndice(indice) { + const { body } = await this._client.indices.getAlias({ index: indice }); + const aliases = Object.keys(body[indice].aliases).filter((alias) => + alias.startsWith(ALIAS_PREFIX) + ); + + if (aliases.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } + + return aliases; + } + + /** + * Check for each indice whether it has an alias or not. + * When the latter is missing, create one based on the indice name. + * + * This check avoids a breaking change for those who were using Kuzzle before + * alias attribution for each indice turned into a standard (appear in 2.14.0). + */ + async generateMissingAliases() { + try { + const { body } = await this._client.cat.indices({ format: "json" }); + const indices = body.map(({ index: indice }) => indice); + const aliases = await this.listAliases(); + + const indicesWithoutAlias = indices.filter( + (indice) => + indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && + !aliases.some((alias) => alias.indice === indice) + ); + + const esRequest = { body: { actions: [] } }; + for (const indice of indicesWithoutAlias) { + esRequest.body.actions.push({ + add: { alias: `${ALIAS_PREFIX}${indice}`, index: indice }, + }); + } + + if (esRequest.body.actions.length > 0) { + await this._client.indices.updateAliases(esRequest); + } + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Throws if index or collection includes forbidden characters + * + * @param {String} index + * @param {String} collection + */ + _assertValidIndexAndCollection(index, collection = null) { + if (!this.isIndexNameValid(index)) { + throw kerror.get("services", "storage", "invalid_index_name", index); + } + + if (collection !== null && !this.isCollectionNameValid(collection)) { + throw kerror.get( + "services", + "storage", + "invalid_collection_name", + collection + ); + } + } + + /** + * Given an alias, extract the associated index. + * + * @param {String} alias + * + * @returns {String} Index name + */ + _extractIndex(alias) { + return alias.substr( + INDEX_PREFIX_POSITION_IN_ALIAS + 1, + alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1 + ); + } + + /** + * Given an alias, extract the associated collection. 
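+ *
+ * @example
+ * // Illustrative sketch, reusing the alias format documented above:
+ * // this._extractCollection("@&nepali.liia"); // => 'liia'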
+ * + * @param {String} alias + * + * @returns {String} Collection name + */ + _extractCollection(alias) { + const separatorPos = alias.indexOf(NAME_SEPARATOR); + + return alias.substr(separatorPos + 1, alias.length); + } + + /** + * Given aliases, extract indexes and collections. + * + * @param {Array.} aliases + * @param {Object.Boolean} includeHidden Only refers to `HIDDEN_COLLECTION` occurences. An empty index will still be listed. Default to `false`. + * + * @returns {Object.} Indexes as key and an array of their collections as value + */ + _extractSchema(aliases, { includeHidden = false } = {}) { + const schema = {}; + + for (const alias of aliases) { + const [indexName, collectionName] = alias + .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) + .split(NAME_SEPARATOR); + + if ( + alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix && + (collectionName !== HIDDEN_COLLECTION || includeHidden) + ) { + if (!schema[indexName]) { + schema[indexName] = []; + } + + if (!schema[indexName].includes(collectionName)) { + schema[indexName].push(collectionName); + } + } + } + + return schema; + } + + /** + * Creates the hidden collection on the provided index if it does not already + * exists + * + * @param {String} index Index name + */ + async _createHiddenCollection(index) { + const mutex = new Mutex(`hiddenCollection/${index}`); + + try { + await mutex.lock(); + + if (await this._hasHiddenCollection(index)) { + return; + } + + const esRequest: IndicesCreate> = { + body: { + aliases: { + [this._getAlias(index, HIDDEN_COLLECTION)]: {}, + }, + settings: { + number_of_replicas: this._config.defaultSettings.number_of_replicas, + number_of_shards: this._config.defaultSettings.number_of_shards, + }, + }, + index: await this._getAvailableIndice(index, HIDDEN_COLLECTION), + wait_for_active_shards: await this._getWaitForActiveShards(), + }; + + await this._client.indices.create(esRequest); + } catch (e) { + throw this._esWrapper.formatESError(e); + } finally { + await mutex.unlock(); + } + } + + /** + * We need to always wait for a minimal number of shards to be available + * before answering to the client. This is to avoid Elasticsearch node + * to return a 404 Not Found error when the client tries to index a + * document in the index. + * To find the best value for this setting, we need to take into account + * the number of nodes in the cluster and the number of shards per index. 
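+ *
+ * The heuristic below is deliberately simple: on a single-node cluster,
+ * replica shards cannot be allocated, so waiting for more than "1" active
+ * shard would never resolve; with several nodes, "all" is returned so that
+ * replicas are also ready before the request is acknowledged.
+ * A hedged usage sketch:
+ *
+ * // wait_for_active_shards: await this._getWaitForActiveShards()
+ * // => "1" on a single-node cluster, "all" otherwise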
+ */ + async _getWaitForActiveShards(): Promise { + const { body } = await this._client.cat.nodes({ format: "json" }); + + const numberOfNodes = body.length; + + if (numberOfNodes > 1) { + return "all"; + } + + return "1"; + } + + /** + * Scroll indice in elasticsearch and return all document that match the filter + * /!\ throws a write_limit_exceed error: this method is intended to be used + * by deleteByQuery and updateByQuery + * + * @param {Object} esRequest - Search request body + * + * @returns {Promise.} resolve to an array of documents + */ + async _getAllDocumentsFromQuery( + esRequest: RequestParams.Search> + ) { + let { + body: { hits, _scroll_id }, + } = await this._client.search(esRequest); + + if (hits.total.value > global.kuzzle.config.limits.documentsWriteCount) { + throw kerror.get("services", "storage", "write_limit_exceeded"); + } + + let documents = hits.hits.map((h: JSONObject) => ({ + _id: h._id, + _source: h._source, + })); + + while (hits.total.value !== documents.length) { + ({ + body: { hits, _scroll_id }, + } = await this._client.scroll({ + scroll: esRequest.scroll, + scroll_id: _scroll_id, + })); + + documents = documents.concat( + hits.hits.map((h: JSONObject) => ({ + _id: h._id, + _source: h._source, + })) + ); + } + + await this.clearScroll(_scroll_id); + + return documents; + } + + /** + * Clean and normalize the searchBody + * Ensure only allowed parameters are passed to ES + * + * @param {Object} searchBody - ES search body (with query, aggregations, sort, etc) + */ + _sanitizeSearchBody(searchBody) { + // Only allow a whitelist of top level properties + for (const key of Object.keys(searchBody)) { + if (searchBody[key] !== undefined && !this.searchBodyKeys.includes(key)) { + throw kerror.get("services", "storage", "invalid_search_query", key); + } + } + + // Ensure that the body does not include a script + this._scriptCheck(searchBody); + + // Avoid empty queries that causes ES to respond with an error. + // Empty queries are turned into match_all queries + if (_.isEmpty(searchBody.query)) { + searchBody.query = { match_all: {} }; + } + + return searchBody; + } + + /** + * Throw if a script is used in the query. + * + * Only Stored Scripts are accepted + * + * @param {Object} object + */ + _scriptCheck(object) { + for (const [key, value] of Object.entries(object)) { + if (this.scriptKeys.includes(key)) { + for (const scriptArg of Object.keys(value)) { + if (!this.scriptAllowedArgs.includes(scriptArg)) { + throw kerror.get( + "services", + "storage", + "invalid_query_keyword", + `${key}.${scriptArg}` + ); + } + } + } + // Every object must be checked here, even the ones nested into an array + else if (typeof value === "object" && value !== null) { + this._scriptCheck(value); + } + } + } + + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isCollectionNameValid(name) { + return _isObjectNameValid(name); + } + + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isIndexNameValid(name) { + return _isObjectNameValid(name); + } + + /** + * Clears an allocated scroll + * @param {[type]} id [description] + * @returns {[type]} [description] + */ + async clearScroll(id?: string) { + if (id) { + debug("clearing scroll: %s", id); + await this._client.clearScroll({ scroll_id: id }); + } + } + + /** + * Loads a configuration value from services.storageEngine and assert a valid + * ms format. 
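+ *
+ * @example
+ * // Hedged sketch; assumes services.storageEngine.defaults.scrollTTL is set to "10s":
+ * // this._loadMsConfig("defaults.scrollTTL"); // => 10000 (milliseconds, parsed by ms())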
+ * + * @param {String} key - relative path to the key in configuration + * + * @returns {Number} milliseconds + */ + _loadMsConfig(key) { + const configValue = _.get(this._config, key); + + assert( + typeof configValue === "string", + `services.storageEngine.${key} must be a string.` + ); + + const parsedValue = ms(configValue); + + assert( + typeof parsedValue === "number", + `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` + ); + + return parsedValue; + } + + /** + * Returns true if one of the mappings dynamic property changes value from + * false to true + */ + _dynamicChanges(previousMappings, newMappings) { + const previousValues = findDynamic(previousMappings); + + for (const [path, previousValue] of Object.entries(previousValues)) { + if (previousValue.toString() !== "false") { + continue; + } + + const newValue = _.get(newMappings, path); + + if (newValue && newValue.toString() !== "false") { + return true; + } + } + + return false; + } + + async waitForElasticsearch() { + if (esState !== esStateEnum.NONE) { + while (esState !== esStateEnum.OK) { + await Bluebird.delay(1000); + } + + return; + } + + esState = esStateEnum.AWAITING; + + global.kuzzle.log.info("[ℹ] Trying to connect to Elasticsearch..."); + + while (esState !== esStateEnum.OK) { + try { + // Wait for at least 1 shard to be initialized + const health = await this._client.cluster.health({ + wait_for_no_initializing_shards: true, + }); + + if (health.body.number_of_pending_tasks === 0) { + global.kuzzle.log.info("[✔] Elasticsearch is ready"); + esState = esStateEnum.OK; + } else { + global.kuzzle.log.info( + `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining` + ); + await Bluebird.delay(1000); + } + } catch (e) { + await Bluebird.delay(1000); + } + } + } + + /** + * Checks if the dynamic properties are correct + */ + _checkDynamicProperty(mappings) { + const dynamicProperties = findDynamic(mappings); + for (const [path, value] of Object.entries(dynamicProperties)) { + // Prevent common mistake + if (typeof value === "boolean") { + _.set(mappings, path, value.toString()); + } else if (typeof value !== "string") { + throw kerror.get( + "services", + "storage", + "invalid_mapping", + path, + "Dynamic property value should be a string." + ); + } + + if (!DYNAMIC_PROPERTY_VALUES.includes(value.toString())) { + throw kerror.get( + "services", + "storage", + "invalid_mapping", + path, + `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join( + '", "' + )}"` + ); + } + } + } + + _setLastActionToKuzzleMeta( + esRequest: JSONObject, + alias: string, + kuzzleMeta: JSONObject + ) { + /** + * @warning Critical code section + * + * bulk body can contain more than 10K elements + */ + let lastAction = ""; + const actionNames = ["index", "create", "update", "delete"]; + + for (let i = 0; i < esRequest.body.length; i++) { + const item = esRequest.body[i]; + const action = Object.keys(item)[0]; + + if (actionNames.indexOf(action) !== -1) { + lastAction = action; + + item[action]._index = alias; + + if (item[action]?._type) { + item[action]._type = undefined; + } + } else if (lastAction === "index" || lastAction === "create") { + item._kuzzle_info = kuzzleMeta.created; + } else if (lastAction === "update") { + this._setLastActionToKuzzleMetaUpdate(item, kuzzleMeta); + } + } + /* end critical code section */ + } + + _setLastActionToKuzzleMetaUpdate(item: JSONObject, kuzzleMeta: JSONObject) { + for (const prop of ["doc", "upsert"]) { + if (isPlainObject(item[prop])) { + item[prop]._kuzzle_info = kuzzleMeta.updated; + } + } + } +} + +/** + * Finds paths and values of mappings dynamic properties + * + * @example + * + * findDynamic(mappings); + * { + * "properties.metadata.dynamic": "true", + * "properties.user.properties.address.dynamic": "strict" + * } + */ +function findDynamic(mappings, path = [], results = {}) { + if (mappings.dynamic !== undefined) { + results[path.concat("dynamic").join(".")] = mappings.dynamic; + } + + for (const [key, value] of Object.entries(mappings)) { + if (isPlainObject(value)) { + findDynamic(value, path.concat(key), results); + } + } + + return results; +} + +/** + * Forbids the use of the _routing ES option + * + * @param {Object} esRequest + * @throws + */ +function assertNoRouting(esRequest) { + if (esRequest.body._routing) { + throw kerror.get("services", "storage", "no_routing"); + } +} + +/** + * Checks if the optional "refresh" argument is well-formed + * + * @param {Object} esRequest + * @throws + */ +function assertWellFormedRefresh(esRequest) { + if (!["wait_for", "false", false, undefined].includes(esRequest.refresh)) { + throw kerror.get( + "services", + "storage", + "invalid_argument", + "refresh", + '"wait_for", false' + ); + } +} + +function getKuid(userId: string): string | null { + if (!userId) { + return null; + } + + return String(userId); +} + +/** + * Checks if an index or collection name is valid + * + * @see https://www.elastic.co/guide/en/elasticsearch/reference/7.4/indices-create-index.html + * + * Beware of the length check: ES allows indice names up to 255 bytes, but since + * in Kuzzle we emulate collections as indices, we have to make sure + * that the privacy prefix, the index name, the separator and the collection + * name ALL fit within the 255-bytes limit of Elasticsearch. 
The simplest way + * is to limit index and collection names to 126 bytes and document that + * limitation (prefix(1) + index(1..126) + sep(1) + collection(1..126) = 4..254) + * + * @param {string} name + * @returns {Boolean} + */ +function _isObjectNameValid(name: string): boolean { + if (typeof name !== "string" || name.length === 0) { + return false; + } + + if (name.toLowerCase() !== name) { + return false; + } + + if (Buffer.from(name).length > 126) { + return false; + } + + if (name === "_all") { + return false; + } + + let valid = true; + + for (let i = 0; valid && i < FORBIDDEN_CHARS.length; i++) { + valid = !name.includes(FORBIDDEN_CHARS[i]); + } + + return valid; +} diff --git a/lib/service/storage/7/esWrapper.js b/lib/service/storage/7/esWrapper.js new file mode 100644 index 0000000000..aa5563953d --- /dev/null +++ b/lib/service/storage/7/esWrapper.js @@ -0,0 +1,303 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint sort-keys: 0 */ + +"use strict"; + +const Bluebird = require("bluebird"); +const _ = require("lodash"); +const es = require("sdk-es7"); + +const { KuzzleError } = require("../../../kerror/errors"); +const debug = require("../../../util/debug")( + "kuzzle:services:storage:ESCommon", +); +const kerror = require("../../../kerror").wrap("services", "storage"); + +const errorMessagesMapping = [ + { + regex: + /^\[es_rejected_execution_exception] rejected execution .*? 
on EsThreadPoolExecutor\[(.*?), .*$/, + subCode: "too_many_operations", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // [illegal_argument_exception] object mapping [titi] can't be changed from nested to non-nested + regex: + /^\[illegal_argument_exception] object mapping \[(.*?)] can't be changed from nested to non-nested$/, + subcode: "cannot_change_mapping", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // [illegal_argument_exception] object mapping [baz] can't be changed from non-nested to nested + regex: + /^\[illegal_argument_exception] object mapping \[(.*?)] can't be changed from non-nested to nested$/, + subcode: "cannot_change_mapping", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // [illegal_argument_exception] Can't merge a non object mapping [aeaze] with an object mapping [aeaze] + regex: + /^\[illegal_argument_exception] Can't merge a non object mapping \[(.*?)] with an object mapping \[(.*?)]$/, + subcode: "cannot_change_mapping", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // [illegal_argument_exception] [tutu.tutu] is defined as an object in mapping [aze] but this name is already used for a field in other types + regex: + /^\[illegal_argument_exception] \[(.*?)] is defined as an object in mapping \[(.*?)] but this name is already used for a field in other types$/, + subcode: "duplicate_field_mapping", + getPlaceholders: (esError, matches) => [matches[1], matches[2]], + }, + { + // [illegal_argument_exception] mapper [source.flags] of different type, current_type [string], merged_type [long] + regex: + /^mapper \[(.*?)] of different type, current_type \[(.*?)], merged_type \[(.*?)]$/, + subcode: "cannot_change_mapping", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // [mapper_parsing_exception] Mapping definition for [flags] has unsupported parameters: [index : not_analyzed] + // eslint-disable-next-line no-regex-spaces + regex: + /^\[mapper_parsing_exception] Mapping definition for \[(.*?)] has unsupported parameters: \[(.*?)]$/, + subcode: "unexpected_properties", + getPlaceholders: (esError, matches) => [matches[2], matches[1]], + }, + { + // [mapper_parsing_exception] No handler for type [boolean] declared on field [not] + regex: + /^\[mapper_parsing_exception] No handler for type \[(.*?)] declared on field \[(.*?)]$/, + subcode: "invalid_mapping_type", + getPlaceholders: (esError, matches) => [matches[2], matches[1]], + }, + { + // [mapper_parsing_exception] failed to parse [conditions.host.flags] + regex: /^\[mapper_parsing_exception] failed to parse \[(.*?)]$/, + subcode: "wrong_mapping_property", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // Failed to parse mapping [_doc]: Expected map for property [fields] on field [enabled] but got a class java.lang.String + regex: + /^Failed to parse mapping \[.*\]: Expected \w+ for property \[(.*)\] on field \[(.*)\]/, + subcode: "wrong_mapping_property", + getPlaceholders: (esError, matches) => [`${matches[2]}.${matches[1]}`], + }, + { + // [index_not_found_exception] no such index, with { resource.type=index_or_alias & resource.id=foso & index=foso } + regex: /^no such index \[([%&])(.*)\.(.*)\]$/, + subcode: "unknown_collection", + getPlaceholders: (esError, matches) => [matches[2], matches[3]], + }, + { + // [mapper_parsing_exception] Expected map for property [fields] on field [foo] but got a class java.lang.String + regex: + /^\[mapper_parsing_exception] Expected map for property \[(.*?)] on field 
\[(.*?)] but got a class java\.lang\.String$/, + subcode: "wrong_mapping_property", + getPlaceholders: (esError, matches) => [`${matches[2]}.${matches[1]}`], + }, + { + regex: + /^\[version_conflict_engine_exception] \[data]\[(.*?)]: version conflict.*$/, + subcode: "too_many_changes", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + //[liia]: version conflict, document already exists (current version [2]) + regex: /^\[(.*)\]: version conflict, document already exists.*/, + subcode: "document_already_exists", + getPlaceholders: () => [], + }, + { + // Unknown key for a START_OBJECT in [term]. + regex: /^Unknown key for a START_OBJECT in \[(.*)\].*/, + subcode: "invalid_search_query", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // mapping set to strict, dynamic introduction of [lehuong] within [_doc] is not allowed + regex: + /^mapping set to strict, dynamic introduction of \[(.+)\] within \[.+\] is not allowed/, + subcode: "strict_mapping_rejection", + getPlaceholders: (esError, matches) => { + // "/%26index.collection/_doc" + const esPath = esError.meta.meta.request.params.path; + // keep only "index" + const index = esPath.split(".")[0].split("%26")[1]; + // keep only "collection" + const collection = esPath.substr(esPath.indexOf(".") + 1).split("/")[0]; + + return [matches[1], index, collection]; + }, + }, + { + // [and] query malformed, no start_object after query name + regex: /^\[(.*)\] query malformed, no start_object after query name/, + subcode: "unknown_query_keyword", + getPlaceholders: (esError, matches) => [matches[1]], + }, + { + // no [query] registered for [equals] + regex: /^no \[query\] registered for \[(.*)\]/, + subcode: "unknown_query_keyword", + getPlaceholders: (esError, matches) => [matches[1]], + }, +]; + +class ESWrapper { + constructor(client) { + this.client = client; + } + + /** + * Transforms raw ES errors into a normalized Kuzzle version + * + * @param {Error} error + * @returns {KuzzleError} + */ + formatESError(error) { + if (error instanceof KuzzleError) { + return error; + } + + global.kuzzle.emit("services:storage:error", { + message: `Elasticsearch Client error: ${error.message}`, + // /!\ not all ES error classes have a "meta" property + meta: error.meta || null, + stack: error.stack, + }); + + if (error instanceof es.errors.NoLivingConnectionsError) { + throw kerror.get("not_connected"); + } + const message = _.get(error, "meta.body.error.reason", error.message); + + // Try to match a known elasticsearch error + for (const betterError of errorMessagesMapping) { + const matches = message.match(betterError.regex); + + if (matches) { + return kerror.get( + betterError.subcode, + ...betterError.getPlaceholders(error, matches), + ); + } + } + + // Try to match using error codes + if (error.meta) { + switch (error.meta.statusCode) { + case 400: + return this._handleBadRequestError(error, message); + case 404: + return this._handleNotFoundError(error, message); + case 409: + return this._handleConflictError(error, message); + default: + break; + } + } + + return this._handleUnknownError(error, message); + } + + reject(error) { + return Bluebird.reject(this.formatESError(error)); + } + + _handleConflictError(error, message) { + debug('unhandled "Conflict" elasticsearch error: %a', error); + + return kerror.get("unexpected_error", message); + } + + _handleNotFoundError(error, message) { + let errorMessage = message; + + if (!error.body._index) { + return kerror.get("unexpected_not_found", errorMessage); + } + + // _index= 
"&nyc-open-data.yellow-taxi" + const index = error.body._index.split(".")[0].slice(1); + const collection = error.body._index.split(".")[1]; + + // 404 on a GET document + if (error.body.found === false) { + return kerror.get("not_found", error.body._id, index, collection); + } + + // 404 on DELETE document (ES error payloads are so cool!) + if (error.meta.body._id) { + return kerror.get("not_found", error.meta.body._id, index, collection); + } + + if (error.meta.body && error.meta.body.error) { + errorMessage = error.meta.body.error + ? `${error.meta.body.error.reason}: ${error.meta.body.error["resource.id"]}` + : `${error.message}: ${error.body._id}`; + } + + debug('unhandled "NotFound" elasticsearch error: %a', error); + + return kerror.get("unexpected_not_found", errorMessage); + } + + _handleBadRequestError(error, message) { + let errorMessage = message; + + if (error.meta.body && error.meta.body.error) { + errorMessage = error.meta.body.error.root_cause + ? error.meta.body.error.root_cause[0].reason + : error.meta.body.error.reason; + + // empty query throws exception with ES 7 + if ( + error.meta.body.error.type === "parsing_exception" && + _.get(error, "meta.body.error.caused_by.type") === + "illegal_argument_exception" + ) { + errorMessage = error.meta.body.error.caused_by.reason; + } + } + + debug( + 'unhandled "BadRequest" elasticsearch error: %a', + _.get(error, "meta.body.error.reason", error.message), + ); + + return kerror.get("unexpected_bad_request", errorMessage); + } + + _handleUnknownError(error, message) { + debug( + "unhandled elasticsearch error (unhandled type: %s): %o", + _.get(error, "error.meta.statusCode", ""), + error, + ); + + return kerror.get("unexpected_error", message); + } +} + +module.exports = ESWrapper; diff --git a/lib/service/storage/8/elasticsearch.js b/lib/service/storage/8/elasticsearch.js new file mode 100644 index 0000000000..e3efb3a9ae --- /dev/null +++ b/lib/service/storage/8/elasticsearch.js @@ -0,0 +1,2922 @@ +"use strict"; +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ES8 = void 0; +const lodash_1 = __importDefault(require("lodash")); +const sdk_es8_1 = require("sdk-es8"); +const assert_1 = __importDefault(require("assert")); +const ms_1 = __importDefault(require("ms")); +const bluebird_1 = __importDefault(require("bluebird")); +const semver_1 = __importDefault(require("semver")); +const debug_1 = __importDefault(require("../../../util/debug")); +const esWrapper_1 = __importDefault(require("./esWrapper")); +const queryTranslator_1 = __importDefault(require("../commons/queryTranslator")); +const didYouMean_1 = __importDefault(require("../../../util/didYouMean")); +const kerror = __importStar(require("../../../kerror")); +const requestAssertions_1 = require("../../../util/requestAssertions"); +const safeObject_1 = require("../../../util/safeObject"); +const storeScopeEnum_1 = __importDefault(require("../../../core/storage/storeScopeEnum")); +const extractFields_1 = __importDefault(require("../../../util/extractFields")); +const mutex_1 = require("../../../util/mutex"); +const name_generator_1 = require("../../../util/name-generator"); +(0, debug_1.default)("kuzzle:services:elasticsearch"); +const SCROLL_CACHE_PREFIX = "_docscroll_"; +const ROOT_MAPPING_PROPERTIES = [ + "properties", + "_meta", + "dynamic", + "dynamic_templates", +]; +const CHILD_MAPPING_PROPERTIES = ["type"]; +// Used for collection emulation +const HIDDEN_COLLECTION = "_kuzzle_keep"; +const ALIAS_PREFIX = "@"; // @todo next major release: Add ALIAS_PREFIX in FORBIDDEN_CHARS +const PRIVATE_PREFIX = "%"; +const PUBLIC_PREFIX = "&"; +const INDEX_PREFIX_POSITION_IN_INDICE = 0; +const INDEX_PREFIX_POSITION_IN_ALIAS = 1; +const NAME_SEPARATOR = "."; +const FORBIDDEN_CHARS = `\\/*?"<>| \t\r\n,+#:${NAME_SEPARATOR}${PUBLIC_PREFIX}${PRIVATE_PREFIX}`; +const DYNAMIC_PROPERTY_VALUES = ["true", "false", "strict"]; +// used to check whether we need to wait for ES to initialize or not +var esStateEnum; +(function (esStateEnum) { + esStateEnum[esStateEnum["AWAITING"] = 1] = "AWAITING"; + esStateEnum[esStateEnum["NONE"] = 2] = "NONE"; + esStateEnum[esStateEnum["OK"] = 3] = "OK"; +})(esStateEnum || (esStateEnum = {})); +let esState = esStateEnum.NONE; +/** + * @param {Kuzzle} kuzzle kuzzle instance + * @param {Object} config Service configuration + * @param {storeScopeEnum} scope + * @constructor + */ +class ES8 { + constructor(config, scope = storeScopeEnum_1.default.PUBLIC) { + this._config = config; + this._scope = scope; + this._indexPrefix = + scope === storeScopeEnum_1.default.PRIVATE ? 
PRIVATE_PREFIX : PUBLIC_PREFIX; + this._client = null; + this._esWrapper = null; + this._esVersion = null; + this._translator = new queryTranslator_1.default(); + // Allowed root key of a search query + this.searchBodyKeys = [ + "aggregations", + "aggs", + "collapse", + "explain", + "fields", + "from", + "highlight", + "query", + "search_after", + "search_timeout", + "size", + "sort", + "suggest", + "_name", + "_source", + "_source_excludes", + "_source_includes", + ]; + /** + * Only allow stored-scripts in queries + */ + this.scriptKeys = ["script", "_script"]; + this.scriptAllowedArgs = ["id", "params"]; + this.maxScrollDuration = this._loadMsConfig("maxScrollDuration"); + this.scrollTTL = this._loadMsConfig("defaults.scrollTTL"); + } + get scope() { + return this._scope; + } + /** + * Initializes the elasticsearch client + * + * @override + * @returns {Promise} + */ + async _initSequence() { + if (this._client) { + return; + } + if (global.NODE_ENV !== "development" && + this._config.commonMapping.dynamic === "true") { + global.kuzzle.log.warn([ + "Your dynamic mapping policy is set to 'true' for new fields.", + "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", + 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', + ].join("\n")); + } + this._client = new sdk_es8_1.Client(this._config.client); + await this.waitForElasticsearch(); + this._esWrapper = new esWrapper_1.default(this._client); + const { version } = await this._client.info(); + if (version && !semver_1.default.satisfies(semver_1.default.coerce(version.number), "^8.0.0")) { + throw kerror.get("services", "storage", "version_mismatch", version.number); + } + this._esVersion = version; + } + /** + * Translate Koncorde filters to Elasticsearch query + * + * @param {Object} filters - Set of valid Koncorde filters + * @returns {Object} Equivalent Elasticsearch query + */ + translateKoncordeFilters(filters) { + return this._translator.translate(filters); + } + /** + * Returns some basic information about this service + * @override + * + * @returns {Promise.} service informations + */ + async info() { + const result = { + type: "elasticsearch", + version: this._esVersion, + }; + try { + const info = await this._client.info(); + result.version = info.version.number; + result.lucene = info.version.lucene_version; + const health = await this._client.cluster.health(); + result.status = health.status; + const stats = await this._client.cluster.stats({ human: true }); + result.spaceUsed = stats.indices.store.size; + result.nodes = stats.nodes; + return result; + } + catch (error) { + return this._esWrapper.reject(error); + } + } + /** + * Returns detailed multi-level storage stats data + * + * @returns {Promise.} + */ + async stats() { + const esRequest = { + metric: ["docs", "store"], + }; + const stats = await this._client.indices.stats(esRequest); + const indexes = {}; + let size = 0; + for (const [indice, indiceInfo] of Object.entries(stats.indices)) { + const infos = indiceInfo; + // Ignore non-Kuzzle indices + if (!indice.startsWith(PRIVATE_PREFIX) && + !indice.startsWith(PUBLIC_PREFIX)) { + continue; + } + const aliases = await this._getAliasFromIndice(indice); + const alias = aliases[0]; + const indexName = this._extractIndex(alias); + const collectionName = this._extractCollection(alias); + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + collectionName === HIDDEN_COLLECTION) { + 
continue; + } + if (!indexes[indexName]) { + indexes[indexName] = { + collections: [], + name: indexName, + size: 0, + }; + } + indexes[indexName].collections.push({ + documentCount: infos.total.docs.count, + name: collectionName, + size: infos.total.store.size_in_bytes, + }); + indexes[indexName].size += infos.total.store.size_in_bytes; + size += infos.total.store.size_in_bytes; + } + return { + indexes: Object.values(indexes), + size, + }; + } + /** + * Scrolls results from previous elasticsearch query. + * Automatically clears the scroll context after the last result page has + * been fetched. + * + * @param {String} scrollId - Scroll identifier + * @param {Object} options - scrollTTL (default scrollTTL) + * + * @returns {Promise.<{ scrollId, hits, aggregations, total }>} + */ + async scroll(scrollId, { scrollTTL } = {}) { + const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; + const esRequest = { + scroll: _scrollTTL, + scroll_id: scrollId, + }; + const cacheKey = SCROLL_CACHE_PREFIX + global.kuzzle.hash(esRequest.scroll_id); + (0, debug_1.default)("Scroll: %o", esRequest); + if (_scrollTTL) { + const scrollDuration = (0, ms_1.default)(_scrollTTL); + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get("services", "storage", "scroll_duration_too_great", _scrollTTL); + } + } + const stringifiedScrollInfo = await global.kuzzle.ask("core:cache:internal:get", cacheKey); + if (!stringifiedScrollInfo) { + throw kerror.get("services", "storage", "unknown_scroll_id"); + } + const scrollInfo = JSON.parse(stringifiedScrollInfo); + try { + const body = await this._client.scroll(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + scrollInfo.fetched += body.hits.hits.length; + if (scrollInfo.fetched >= totalHitsValue) { + (0, debug_1.default)("Last scroll page fetched: deleting scroll %s", body._scroll_id); + await global.kuzzle.ask("core:cache:internal:del", cacheKey); + await this.clearScroll(body._scroll_id); + } + else { + await global.kuzzle.ask("core:cache:internal:store", cacheKey, JSON.stringify(scrollInfo), { + ttl: (0, ms_1.default)(_scrollTTL) || this.scrollTTL, + }); + } + const remaining = totalHitsValue - scrollInfo.fetched; + return await this._formatSearchResult(body, remaining, scrollInfo); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Searches documents from elasticsearch with a query + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) 
+ * @param {Object} options - from (undefined), size (undefined), scroll (undefined) + * + * @returns {Promise.<{ scrollId, hits, aggregations, suggest, total }>} + */ + async search({ index, collection, searchBody, targets, } = {}, { from, size, scroll, } = {}) { + let esIndexes; + if (targets && targets.length > 0) { + const indexes = new Set(); + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + indexes.add(alias); + } + } + esIndexes = Array.from(indexes).join(","); + } + else { + esIndexes = this._getAlias(index, collection); + } + const esRequest = { + ...this._sanitizeSearchBody(searchBody), + from, + index: esIndexes, + scroll, + size, + track_total_hits: true, + }; + if (scroll) { + const scrollDuration = (0, ms_1.default)(scroll); + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get("services", "storage", "scroll_duration_too_great", scroll); + } + } + (0, debug_1.default)("Search: %j", esRequest); + try { + const body = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + let remaining; + if (body._scroll_id) { + const ttl = (esRequest.scroll && (0, ms_1.default)(esRequest.scroll)) || + (0, ms_1.default)(this._config.defaults.scrollTTL); + await global.kuzzle.ask("core:cache:internal:store", SCROLL_CACHE_PREFIX + global.kuzzle.hash(body._scroll_id), JSON.stringify({ + collection, + fetched: body.hits.hits.length, + index, + targets, + }), { ttl }); + remaining = totalHitsValue - body.hits.hits.length; + } + return await this._formatSearchResult(body, remaining, { + collection, + index, + targets, + }); + } + catch (error) { + console.error(error); + throw this._esWrapper.formatESError(error); + } + } + /** + * Generate a map that associate an alias to a pair of index and collection + * + * @param {*} targets + * @returns + */ + _mapTargetsToAlias(targets) { + const aliasToTargets = {}; + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + if (!aliasToTargets[alias]) { + aliasToTargets[alias] = { + collection: targetCollection, + index: target.index, + }; + } + } + } + return aliasToTargets; + } + async _formatSearchResult(body, remaining, searchInfo = {}) { + let aliasToTargets = {}; + const aliasCache = new Map(); + if (searchInfo.targets) { + /** + * We need to map the alias to the target index and collection, + * so we can later retrieve informations about an index & collection + * based on its alias. + */ + aliasToTargets = this._mapTargetsToAlias(searchInfo.targets); + } + const formatHit = async (hit) => { + let index = searchInfo.index; + let collection = searchInfo.collection; + /** + * If the search has been done on multiple targets, we need to + * retrieve the appropriate index and collection based on the alias + */ + if (hit._index && searchInfo.targets) { + // Caching to reduce call to ES + let aliases = aliasCache.get(hit._index); + if (!aliases) { + // Retrieve all the alias associated to one index + aliases = await this._getAliasFromIndice(hit._index); + aliasCache.set(hit._index, aliases); + } + /** + * Since multiple alias can point to the same index in ES, we need to + * find the first alias that exists in the map of aliases associated + * to the targets. 
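+ *
+ * Hedged illustration with hypothetical values:
+ *   aliases        = ["@&nyc-open-data.yellow-taxi", "some-custom-alias"]
+ *   aliasToTargets = { "@&nyc-open-data.yellow-taxi": { index: "nyc-open-data", collection: "yellow-taxi" } }
+ * => "@&nyc-open-data.yellow-taxi" is the first alias present in aliasToTargets,
+ *    so the hit is attributed to index "nyc-open-data" / collection "yellow-taxi".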
+ */ + const alias = aliases.find((_alias) => aliasToTargets[_alias]); + // Retrieve index and collection information based on the matching alias + index = aliasToTargets[alias].index; + collection = aliasToTargets[alias].collection; + } + return { + _id: hit._id, + _score: hit._score, + _source: hit._source, + collection, + highlight: hit.highlight, + index, + }; + }; + async function formatInnerHits(innerHits) { + if (!innerHits) { + return undefined; + } + const formattedInnerHits = {}; + for (const [name, innerHit] of Object.entries(innerHits)) { + formattedInnerHits[name] = await bluebird_1.default.map(innerHit.hits.hits, formatHit); + } + return formattedInnerHits; + } + const hits = await bluebird_1.default.map(body.hits.hits, async (hit) => ({ + inner_hits: await formatInnerHits(hit.inner_hits), + ...(await formatHit(hit)), + })); + return { + aggregations: body.aggregations, + hits, + remaining, + scrollId: body._scroll_id, + suggest: body.suggest, + total: body.hits.total.value, + }; + } + /** + * Gets the document with given ID + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async get(index, collection, id) { + const esRequest = { + id, + index: this._getAlias(index, collection), + }; + // Just in case the user make a GET on url /mainindex/test/_search + // Without this test we return something weird: a result.hits.hits with all + // document without filter because the body is empty in HTTP by default + if (esRequest.id === "_search") { + return kerror.reject("services", "storage", "search_as_an_id"); + } + (0, debug_1.default)("Get document: %o", esRequest); + try { + const body = await this._client.get(esRequest); + return { + _id: body._id, + _source: body._source, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Returns the list of documents matching the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mGet(index, collection, ids) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + const esRequest = { + docs: ids.map((_id) => ({ + _id, + _index: this._getAlias(index, collection), + })), + }; + (0, debug_1.default)("Multi-get documents: %o", esRequest); + let body; + try { + body = await this._client.mget(esRequest); // NOSONAR + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + const errors = []; + const items = []; + for (const doc of body.docs) { + if (!("error" in doc) && doc.found) { + items.push({ + _id: doc._id, + _source: doc._source, + _version: doc._version, + }); + } + else { + errors.push(doc._id); + } + } + return { errors, items }; + } + /** + * Counts how many documents match the filter given in body + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) 
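+ *
+ * @example
+ * // Hedged usage sketch (index/collection names are illustrative):
+ * // const total = await this.count("nyc-open-data", "yellow-taxi", {
+ * //   query: { match_all: {} },
+ * // });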
+ * + * @returns {Promise.} count + */ + async count(index, collection, searchBody = {}) { + const esRequest = { + ...this._sanitizeSearchBody(searchBody), + index: this._getAlias(index, collection), + }; + (0, debug_1.default)("Count: %o", esRequest); + try { + const body = await this._client.count(esRequest); + return body.count; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends the new document to elasticsearch + * Cleans data to match elasticsearch specifications + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} content - Document content + * @param {Object} options - id (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { _id, _version, _source } + */ + async create(index, collection, content, { id, refresh, userId = null, injectKuzzleMeta = true, } = {}) { + (0, requestAssertions_1.assertIsObject)(content); + const esRequest = { + document: content, + id, + index: this._getAlias(index, collection), + op_type: id ? "create" : "index", + refresh, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + // Add metadata + if (injectKuzzleMeta) { + esRequest.document._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }; + } + (0, debug_1.default)("Create document: %o", esRequest); + try { + const body = await this._client.index(esRequest); + return { + _id: body._id, + _source: esRequest.document, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Creates a new document to Elasticsearch, or replace it if it already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null), injectKuzzleMeta (true) + * + * @returns {Promise.} { _id, _version, _source, created } + */ + async createOrReplace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { + const esRequest = { + document: content, + id, + index: this._getAlias(index, collection), + refresh, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + // Add metadata + if (injectKuzzleMeta) { + esRequest.document._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + (0, debug_1.default)("Create or replace document: %o", esRequest); + try { + const body = await this._client.index(esRequest); + return { + _id: body._id, + _source: esRequest.document, + _version: body._version, + created: body.result === "created", // Needed by the notifier + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends the partial document to elasticsearch with the id to update + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async update(index, collection, id, content, { refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { + const esRequest = { + _source: true, + doc: content, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: 
retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + if (injectKuzzleMeta) { + // Add metadata + esRequest.doc._kuzzle_info = { + ...esRequest.doc._kuzzle_info, + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + (0, debug_1.default)("Update document: %o", esRequest); + try { + const body = await this._client.update(esRequest); + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends the partial document to elasticsearch with the id to update + * Creates the document if it doesn't already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - defaultValues ({}), refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async upsert(index, collection, id, content, { defaultValues = {}, refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { + const esRequest = { + _source: true, + doc: content, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, + upsert: { ...defaultValues, ...content }, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + // Add metadata + const user = getKuid(userId); + const now = Date.now(); + if (injectKuzzleMeta) { + esRequest.doc._kuzzle_info = { + ...esRequest.doc._kuzzle_info, + updatedAt: now, + updater: user, + }; + esRequest.upsert._kuzzle_info = { + ...esRequest.upsert._kuzzle_info, + author: user, + createdAt: now, + }; + } + (0, debug_1.default)("Upsert document: %o", esRequest); + try { + const body = await this._client.update(esRequest); + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + created: body.result === "created", + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Replaces a document to Elasticsearch + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async replace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { + const alias = this._getAlias(index, collection); + const esRequest = { + document: content, + id, + index: alias, + refresh, + }; + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + if (injectKuzzleMeta) { + // Add metadata + esRequest.document._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + try { + const exists = await this._client.exists({ id, index: alias }); + if (!exists) { + throw kerror.get("services", "storage", "not_found", id, index, collection); + } + (0, debug_1.default)("Replace document: %o", esRequest); + const body = await this._client.index(esRequest); + return { + _id: id, + _source: esRequest.document, + _version: body._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Sends to elasticsearch the document id to delete + * + * @param {String} 
index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} options - refresh (undefined) + * + * @returns {Promise} + */ + async delete(index, collection, id, { refresh, } = {}) { + const esRequest = { + id, + index: this._getAlias(index, collection), + refresh, + }; + assertWellFormedRefresh(esRequest); + (0, debug_1.default)("Delete document: %o", esRequest); + try { + await this._client.delete(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + return null; + } + /** + * Deletes all documents matching the provided filters. + * If fetch=false, the max documents write limit is not applied. + * + * Options: + * - size: size of the batch to retrieve documents (no-op if fetch=false) + * - refresh: refresh option for ES + * - fetch: if true, will fetch the documents before delete them + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} options - size (undefined), refresh (undefined), fetch (true) + * + * @returns {Promise.<{ documents, total, deleted, failures: Array<{ id, reason }> }>} + */ + async deleteByQuery(index, collection, query, { refresh, size = 1000, fetch = true, } = {}) { + const esRequest = { + ...this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + }; + if (!(0, safeObject_1.isPlainObject)(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + try { + let documents = []; + if (fetch) { + documents = await this._getAllDocumentsFromQuery({ + ...esRequest, + size, + }); + } + (0, debug_1.default)("Delete by query: %o", esRequest); + esRequest.refresh = refresh === "wait_for" ? 
true : refresh; + const request = { + ...esRequest, + max_docs: size, + }; + if (request.max_docs === -1) { + request.max_docs = undefined; + } + const body = await this._client.deleteByQuery(request); + return { + deleted: body.deleted, + documents, + failures: body.failures.map(({ id, cause }) => ({ + id, + reason: cause.reason, + })), + total: body.total, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Delete fields of a document and replace it + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Array} fields - Document fields to be removed + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async deleteFields(index, collection, id, fields, { refresh, userId = null, } = {}) { + const alias = this._getAlias(index, collection); + const esRequest = { + id, + index: alias, + }; + try { + (0, debug_1.default)("DeleteFields document: %o", esRequest); + const body = await this._client.get(esRequest); + for (const field of fields) { + if (lodash_1.default.has(body._source, field)) { + lodash_1.default.set(body._source, field, undefined); + } + } + const updatedInfos = { + updatedAt: Date.now(), + updater: getKuid(userId), + }; + if (typeof body._source._kuzzle_info === "object") { + body._source._kuzzle_info = { + ...body._source._kuzzle_info, + ...updatedInfos, + }; + } + else { + body._source._kuzzle_info = updatedInfos; + } + const newEsRequest = { + document: body._source, + id, + index: alias, + refresh, + }; + assertNoRouting(newEsRequest); + assertWellFormedRefresh(newEsRequest); + const updated = await this._client.index(newEsRequest); + return { + _id: id, + _source: body._source, + _version: updated._version, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined), size (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async updateByQuery(index, collection, query, changes, { refresh, size = 1000, userId = null, } = {}) { + try { + const esRequest = { + ...this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + size, + }; + const documents = await this._getAllDocumentsFromQuery(esRequest); + for (const document of documents) { + document._source = undefined; + document.body = changes; + } + (0, debug_1.default)("Update by query: %o", esRequest); + const { errors, items } = await this.mUpdate(index, collection, documents, { refresh, userId }); + return { + errors, + successes: items, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async bulkUpdateByQuery(index, collection, query, 
changes, { refresh = false, } = {}) { + const script = { + params: {}, + source: "", + }; + const flatChanges = (0, extractFields_1.default)(changes, { alsoExtractValues: true }); + for (const { key, value } of flatChanges) { + script.source += `ctx._source.${key} = params['${key}'];`; + script.params[key] = value; + } + const esRequest = { + index: this._getAlias(index, collection), + query: this._sanitizeSearchBody({ query }).query, + refresh, + script, + }; + (0, debug_1.default)("Bulk Update by query: %o", esRequest); + let response; + try { + response = await this._client.updateByQuery(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + if (response.failures.length) { + const errors = response.failures.map(({ id, cause }) => ({ + cause, + id, + })); + throw kerror.get("services", "storage", "incomplete_update", response.updated, errors); + } + return { + updated: response.updated, + }; + } + /** + * Execute the callback with a batch of documents of specified size until all + * documents matched by the query have been processed. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Function} callback - callback that will be called with the "hits" array + * @param {Object} options - size (10), scrollTTL ('5s') + * + * @returns {Promise.} Array of results returned by the callback + */ + async mExecute(index, collection, query, callback, { size = 10, scrollTTl = "5s", } = {}) { + const esRequest = { + ...this._sanitizeSearchBody({ query }), + from: 0, + index: this._getAlias(index, collection), + scroll: scrollTTl, + size, + }; + if (!(0, safeObject_1.isPlainObject)(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + const results = []; + let processed = 0; + let scrollId = null; + try { + let body = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + while (processed < totalHitsValue && body.hits.hits.length > 0) { + scrollId = body._scroll_id; + results.push(await callback(body.hits.hits)); + processed += body.hits.hits.length; + body = await this._client.scroll({ + scroll: esRequest.scroll, + scroll_id: scrollId, + }); + } + } + finally { + await this.clearScroll(scrollId); + } + return results; + } + /** + * Creates a new index. + * + * This methods creates an hidden collection in the provided index to be + * able to list it. + * This methods resolves if the index name does not already exists either as + * private or public index. + * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async createIndex(index) { + this._assertValidIndexAndCollection(index); + let body; + try { + body = await this._client.cat.aliases({ format: "json" }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias: name }) => name); + for (const alias of aliases) { + const indexName = this._extractIndex(alias); + if (index === indexName) { + const indexType = alias[INDEX_PREFIX_POSITION_IN_ALIAS] === PRIVATE_PREFIX + ? "private" + : "public"; + throw kerror.get("services", "storage", "index_already_exists", indexType, index); + } + } + await this._createHiddenCollection(index); + return null; + } + /** + * Creates an empty collection. + * Mappings and settings will be applied if supplied. 
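+ * If the collection already exists, its mappings and settings are updated instead.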
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async createCollection(index, collection, { mappings = {}, settings = {}, } = {}) { + this._assertValidIndexAndCollection(index, collection); + if (collection === HIDDEN_COLLECTION) { + throw kerror.get("services", "storage", "collection_reserved", HIDDEN_COLLECTION); + } + const mutex = new mutex_1.Mutex(`hiddenCollection/create/${index}`); + try { + await mutex.lock(); + if (await this._hasHiddenCollection(index)) { + await this.deleteCollection(index, HIDDEN_COLLECTION); + } + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + finally { + await mutex.unlock(); + } + const esRequest = { + aliases: { + [this._getAlias(index, collection)]: {}, + }, + index: await this._getAvailableIndice(index, collection), + mappings: {}, + settings, + wait_for_active_shards: await this._getWaitForActiveShards(), + }; + this._checkDynamicProperty(mappings); + const exists = await this.hasCollection(index, collection); + if (exists) { + return this.updateCollection(index, collection, { mappings, settings }); + } + this._checkMappings(mappings); + esRequest.mappings = { + _meta: mappings._meta || this._config.commonMapping._meta, + dynamic: mappings.dynamic || this._config.commonMapping.dynamic, + properties: lodash_1.default.merge(mappings.properties, this._config.commonMapping.properties), + }; + esRequest.settings.number_of_replicas = + esRequest.settings.number_of_replicas || + this._config.defaultSettings.number_of_replicas; + esRequest.settings.number_of_shards = + esRequest.settings.number_of_shards || + this._config.defaultSettings.number_of_shards; + try { + await this._client.indices.create(esRequest); + } + catch (error) { + if (lodash_1.default.get(error, "meta.body.error.type") === + "resource_already_exists_exception") { + // race condition: the indice has been created between the "exists" + // check above and this "create" attempt + return null; + } + throw this._esWrapper.formatESError(error); + } + return null; + } + /** + * Retrieves settings definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.<{ settings }>} + */ + async getSettings(index, collection) { + const indice = await this._getIndice(index, collection); + const esRequest = { + index: indice, + }; + (0, debug_1.default)("Get settings: %o", esRequest); + try { + const body = await this._client.indices.getSettings(esRequest); + return body[indice].settings.index; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Retrieves mapping definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} options - includeKuzzleMeta (false) + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async getMapping(index, collection, { includeKuzzleMeta = false, } = {}) { + const indice = await this._getIndice(index, collection); + const esRequest = { + index: indice, + }; + (0, debug_1.default)("Get mapping: %o", esRequest); + try { + const body = await this._client.indices.getMapping(esRequest); + const properties = includeKuzzleMeta + ? 
body[indice].mappings.properties + : lodash_1.default.omit(body[indice].mappings.properties, "_kuzzle_info"); + return { + _meta: body[indice].mappings._meta, + dynamic: body[indice].mappings.dynamic, + properties, + }; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Updates a collection mappings and settings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async updateCollection(index, collection, { mappings = {}, settings = {}, } = {}) { + const esRequest = { + index: await this._getIndice(index, collection), + }; + // If either the putMappings or the putSettings operation fail, we need to + // rollback the whole operation. Since mappings can't be rollback, we try to + // update the settings first, then the mappings and we rollback the settings + // if putMappings fail. + let indexSettings; + try { + indexSettings = await this._getSettings(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + if (!lodash_1.default.isEmpty(settings)) { + await this.updateSettings(index, collection, settings); + } + try { + if (!lodash_1.default.isEmpty(mappings)) { + const previousMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + await this.updateMapping(index, collection, mappings); + if (this._dynamicChanges(previousMappings, mappings)) { + await this.updateSearchIndex(index, collection); + } + } + } + catch (error) { + const allowedSettings = this.getAllowedIndexSettings(indexSettings); + // Rollback to previous settings + if (!lodash_1.default.isEmpty(settings)) { + await this.updateSettings(index, collection, allowedSettings); + } + throw error; + } + return null; + } + /** + * Given index settings we return a new version of index settings + * only with allowed settings that can be set (during update or create index). + * @param indexSettings the index settings + * @returns {{index: *}} a new index settings with only allowed settings. + */ + getAllowedIndexSettings(indexSettings) { + return { + index: lodash_1.default.omit(indexSettings.index, [ + "creation_date", + "provided_name", + "uuid", + "version", + ]), + }; + } + /** + * Sends an empty UpdateByQuery request to update the search index + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @returns {Promise.} {} + */ + async updateSearchIndex(index, collection) { + const esRequest = { + // @cluster: conflicts when two nodes start at the same time + conflicts: "proceed", + index: this._getAlias(index, collection), + refresh: true, + // This operation can take some time: this should be an ES + // background task. And it's preferable to a request timeout when + // processing large indexes. 
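+ // (With wait_for_completion set to false, Elasticsearch should answer with a task reference instead of blocking until the whole update is done.)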
+ wait_for_completion: false, + }; + (0, debug_1.default)("UpdateByQuery: %o", esRequest); + try { + await this._client.updateByQuery(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Update a collection mappings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} mappings - Collection mappings in ES format + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async updateMapping(index, collection, mappings = {}) { + let esRequest = { + index: this._getAlias(index, collection), + }; + this._checkDynamicProperty(mappings); + const collectionMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + this._checkMappings(mappings); + esRequest = { + ...esRequest, + _meta: mappings._meta || collectionMappings._meta, + dynamic: mappings.dynamic || collectionMappings.dynamic, + properties: mappings.properties, + }; + (0, debug_1.default)("Update mapping: %o", esRequest); + try { + await this._client.indices.putMapping(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const fullProperties = lodash_1.default.merge(collectionMappings.properties, mappings.properties); + return { + _meta: esRequest._meta, + dynamic: esRequest.dynamic.toString(), + properties: fullProperties, + }; + } + /** + * Updates a collection settings (eg: analyzers) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} settings - Collection settings in ES format + * + * @returns {Promise} + */ + async updateSettings(index, collection, settings = {}) { + const esRequest = { + index: this._getAlias(index, collection), + }; + await this._client.indices.close(esRequest); + try { + await this._client.indices.putSettings({ ...esRequest, body: settings }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + finally { + await this._client.indices.open(esRequest); + } + return null; + } + /** + * Empties the content of a collection. Keep the existing mapping and settings. 
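+ * Internally, the underlying indice is deleted and recreated with the previous mappings and allowed settings.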
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise} + */ + async truncateCollection(index, collection) { + let mappings; + let settings; + const esRequest = { + index: await this._getIndice(index, collection), + }; + try { + mappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + settings = await this._getSettings(esRequest); + settings = { + ...settings, + ...this.getAllowedIndexSettings(settings), + }; + await this._client.indices.delete(esRequest); + await this._client.indices.create({ + ...esRequest, + aliases: { + [this._getAlias(index, collection)]: {}, + }, + mappings, + settings, + wait_for_active_shards: await this._getWaitForActiveShards(), + }); + return null; + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Runs several action and document + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents to import + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.<{ items, errors }> + */ + async import(index, collection, documents, { refresh, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection); + const dateNow = Date.now(); + const esRequest = { + operations: documents, + refresh, + timeout, + }; + const kuzzleMeta = { + created: { + author: getKuid(userId), + createdAt: dateNow, + updatedAt: null, + updater: null, + }, + updated: { + updatedAt: dateNow, + updater: getKuid(userId), + }, + }; + assertWellFormedRefresh(esRequest); + this._scriptCheck(documents); + this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta); + let body; + try { + body = await this._client.bulk(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const result = { + errors: [], + items: [], + }; + let idx = 0; + /** + * @warning Critical code section + * + * bulk body can contain more than 10K elements + */ + for (let i = 0; i < body.items.length; i++) { + const row = body.items[i]; + const action = Object.keys(row)[0]; + const item = row[action]; + if (item.status >= 400) { + const error = { + _id: item._id, + status: item.status, + }; + // update action contain body in "doc" field + // the delete action is not followed by an action payload + if (action === "update") { + error._source = documents[idx + 1].doc; + error._source._kuzzle_info = undefined; + } + else if (action !== "delete") { + error._source = documents[idx + 1]; + error._source._kuzzle_info = undefined; + } + // ES response does not systematicaly include an error object + // (e.g. delete action with 404 status) + if (item.error) { + error.error = { + reason: item.error.reason, + type: item.error.type, + }; + } + result.errors.push({ [action]: error }); + } + else { + result.items.push({ + [action]: { + _id: item._id, + status: item.status, + }, + }); + } + // the delete action is not followed by an action payload + idx = action === "delete" ? 
idx + 1 : idx + 2; + } + /* end critical code section */ + return result; + } + /** + * Retrieves the complete list of existing collections in the current index + * + * @param {String} index - Index name + * @param {Object.Boolean} includeHidden - Optional: include HIDDEN_COLLECTION in results + * + * @returns {Promise.} Collection names + */ + async listCollections(index, { includeHidden = false } = {}) { + let body; + try { + body = await this._client.cat.aliases({ format: "json" }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias }) => alias); + const schema = this._extractSchema(aliases, { includeHidden }); + return schema[index] || []; + } + /** + * Retrieves the complete list of indexes + * + * @returns {Promise.} Index names + */ + async listIndexes() { + let body; + try { + body = await this._client.cat.aliases({ format: "json" }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias }) => alias); + const schema = this._extractSchema(aliases); + return Object.keys(schema); + } + /** + * Returns an object containing the list of indexes and collections + * + * @returns {Object.} Object + */ + async getSchema() { + let body; + try { + body = await this._client.cat.aliases({ format: "json" }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = body.map(({ alias }) => alias); + const schema = this._extractSchema(aliases, { includeHidden: true }); + for (const [index, collections] of Object.entries(schema)) { + schema[index] = collections.filter((c) => c !== HIDDEN_COLLECTION); + } + return schema; + } + /** + * Retrieves the complete list of aliases + * + * @returns {Promise.} [ { alias, index, collection, indice } ] + */ + async listAliases() { + let body; + try { + body = await this._client.cat.aliases({ format: "json" }); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + const aliases = []; + for (const { alias, index: indice } of body) { + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix) { + aliases.push({ + alias, + collection: this._extractCollection(alias), + index: this._extractIndex(alias), + indice, + }); + } + } + return aliases; + } + /** + * Deletes a collection + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise} + */ + async deleteCollection(index, collection) { + const indice = await this._getIndice(index, collection); + const esRequest = { + index: indice, + }; + try { + await this._client.indices.delete(esRequest); + const alias = this._getAlias(index, collection); + if (await this._checkIfAliasExists(alias)) { + await this._client.indices.deleteAlias({ + index: indice, + name: alias, + }); + } + await this._createHiddenCollection(index); + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + return null; + } + /** + * Deletes multiple indexes + * + * @param {String[]} indexes - Index names + * + * @returns {Promise.} + */ + async deleteIndexes(indexes = []) { + if (indexes.length === 0) { + return bluebird_1.default.resolve([]); + } + const deleted = new Set(); + try { + const body = await this._client.cat.aliases({ format: "json" }); + const esRequest = body.reduce((request, { alias, index: indice }) => { + const index = this._extractIndex(alias); + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + !indexes.includes(index)) { + return request; + } + deleted.add(index); + 
request.index.push(indice); + return request; + }, { index: [] }); + if (esRequest.index.length === 0) { + return []; + } + (0, debug_1.default)("Delete indexes: %o", esRequest); + await this._client.indices.delete(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + return Array.from(deleted); + } + /** + * Deletes an index + * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async deleteIndex(index) { + await this.deleteIndexes([index]); + return null; + } + /** + * Forces a refresh on the collection. + * + * /!\ Can lead to some performance issues. + * cf https://www.elastic.co/guide/en/elasticsearch/guide/current/near-real-time.html for more details + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} { _shards } + */ + async refreshCollection(index, collection) { + const esRequest = { + index: this._getAlias(index, collection), + }; + let body; + try { + body = await this._client.indices.refresh(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + return body; + } + /** + * Returns true if the document exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.} + */ + async exists(index, collection, id) { + const esRequest = { + id, + index: this._getAlias(index, collection), + }; + try { + return await this._client.exists(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Returns the list of documents existing with the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. 
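+ * Found document ids are returned in "items", missing ones in "errors".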
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mExists(index, collection, ids) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + const esRequest = { + _source: "false", + docs: ids.map((_id) => ({ _id })), + index: this._getAlias(index, collection), + }; + (0, debug_1.default)("mExists: %o", esRequest); + let body; + try { + body = await this._client.mget(esRequest); // NOSONAR + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + const errors = []; + const items = []; + for (let i = 0; i < body.docs.length; i++) { + const doc = body.docs[i]; + if (!("error" in doc) && doc.found) { + items.push(doc._id); + } + else { + errors.push(doc._id); + } + } + return { errors, items }; + } + /** + * Returns true if the index exists + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async hasIndex(index) { + const indexes = await this.listIndexes(); + return indexes.some((idx) => idx === index); + } + /** + * Returns true if the collection exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} + */ + async hasCollection(index, collection) { + const collections = await this.listCollections(index); + return collections.some((col) => col === collection); + } + /** + * Returns true if the index has the hidden collection + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async _hasHiddenCollection(index) { + const collections = await this.listCollections(index, { + includeHidden: true, + }); + return collections.some((col) => col === HIDDEN_COLLECTION); + } + /** + * Creates multiple documents at once. + * If a content has no id, one is automatically generated and assigned to it. + * If a content has a specified identifier, it is rejected if it already exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mCreate(index, collection, documents, { refresh, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection), kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); + // prepare the mget request, but only for document having a specified id + const body = documentsToGet.length > 0 + ? 
await this._client.mget({ + docs: documentsToGet, + index: alias, + }) + : { docs: [] }; + const existingDocuments = body.docs; + const esRequest = { + index: alias, + operations: [], + refresh, + timeout, + }; + const toImport = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0, idx = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + // Documents are retrieved in the same order than we got them from user + if (typeof document._id === "string" && existingDocuments[idx]) { + const doc = existingDocuments[idx]; + if (!("error" in doc) && doc.found) { + document._source._kuzzle_info = undefined; + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document already exists", + status: 400, + }); + } + else { + esRequest.operations.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.operations.push(document._source); + toImport.push(document); + } + idx++; + } + else { + esRequest.operations.push({ index: { _index: alias } }); + esRequest.operations.push(document._source); + toImport.push(document); + } + } + /* end critical code section */ + return this._mExecute(esRequest, toImport, rejected); + } + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null), injectKuzzleMeta (false), limits (true) + * + * @returns {Promise.<{ items, errors }> + */ + async mCreateOrReplace(index, collection, documents, { refresh, timeout, userId = null, injectKuzzleMeta = true, limits = true, source = true, } = {}) { + let kuzzleMeta = {}; + if (injectKuzzleMeta) { + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }; + } + const alias = this._getAlias(index, collection); + const esRequest = { + index: alias, + operations: [], + refresh, + timeout, + }; + const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); + esRequest.operations = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.operations.push({ + index: { + _id: extractedDocuments[i]._id, + _index: alias, + }, + }); + esRequest.operations.push(extractedDocuments[i]._source); + } + /* end critical code section */ + return this._mExecute(esRequest, extractedDocuments, rejected, { + limits, + source, + }); + } + /** + * Updates multiple documents with one request + * Replacements are rejected if targeted documents do not exist + * (like with the normal "update" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), retryOnConflict (0), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mUpdate(index, collection, documents, { refresh = undefined, retryOnConflict = 0, timeout = undefined, userId = null, } = {}) { + const alias = this._getAlias(index, collection), toImport = [], esRequest = { + index: alias, + operations: [], + refresh, + timeout, + }, kuzzleMeta = { + _kuzzle_info: { + updatedAt: Date.now(), + updater: getKuid(userId), + }, + }, { rejected, 
extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const extractedDocument = extractedDocuments[i]; + if (typeof extractedDocument._id === "string") { + esRequest.operations.push({ + update: { + _id: extractedDocument._id, + _index: alias, + retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }); + // _source: true => makes ES return the updated document source in the + // response. Required by the real-time notifier component + esRequest.operations.push({ + _source: true, + doc: extractedDocument._source, + }); + toImport.push(extractedDocument); + } + else { + extractedDocument._source._kuzzle_info = undefined; + rejected.push({ + document: { + _id: extractedDocument._id, + body: extractedDocument._source, + }, + reason: "document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + const response = await this._mExecute(esRequest, toImport, rejected); + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + status: item.status, + })); + return response; + } + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - refresh (undefined), retryOnConflict (0), timeout (undefined), userId (null) + * + * @returns {Promise.<{ items, errors }> + */ + async mUpsert(index, collection, documents, { refresh, retryOnConflict = 0, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection); + const esRequest = { + operations: [], + refresh, + timeout, + }; + const user = getKuid(userId); + const now = Date.now(); + const kuzzleMeta = { + doc: { + _kuzzle_info: { + updatedAt: now, + updater: user, + }, + }, + upsert: { + _kuzzle_info: { + author: user, + createdAt: now, + }, + }, + }; + const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta, { + prepareMUpsert: true, + requireId: true, + }); + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.operations.push({ + update: { + _id: extractedDocuments[i]._id, + _index: alias, + _source: true, + retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }, { + doc: extractedDocuments[i]._source.changes, + upsert: extractedDocuments[i]._source.default, + }); + // _source: true + // Makes ES return the updated document source in the response. 
+ // Required by the real-time notifier component + } + /* end critical code section */ + const response = await this._mExecute(esRequest, extractedDocuments, rejected); + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + created: item.result === "created", // Needed by the notifier + status: item.status, + })); + return response; + } + /** + * Replaces multiple documents at once. + * Replacements are rejected if targeted documents do not exist + * (like with the normal "replace" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mReplace(index, collection, documents, { refresh, timeout, userId = null, } = {}) { + const alias = this._getAlias(index, collection), kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { + prepareMGet: true, + requireId: true, + }); + if (documentsToGet.length < 1) { + return { errors: rejected, items: [] }; + } + const body = await this._client.mget({ + docs: documentsToGet, + index: alias, + }); + const existingDocuments = body.docs; + const esRequest = { + operations: [], + refresh, + timeout, + }; + const toImport = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + // Documents are retrieved in the same order than we got them from user + const doc = existingDocuments[i]; + if (!("error" in doc) && doc?.found) { + esRequest.operations.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.operations.push(document._source); + toImport.push(document); + } + else { + document._source._kuzzle_info = undefined; + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + return this._mExecute(esRequest, toImport, rejected); + } + /** + * Deletes multiple documents with one request + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Documents IDs + * @param {Object} options - timeout (undefined), refresh (undefined) + * + * @returns {Promise.<{ documents, errors }> + */ + async mDelete(index, collection, ids, { refresh, } = {}) { + const query = { ids: { values: [] } }; + const validIds = []; + const partialErrors = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < ids.length; i++) { + const _id = ids[i]; + if (typeof _id === "string") { + validIds.push(_id); + } + else { + partialErrors.push({ + _id, + reason: "document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + await this.refreshCollection(index, collection); + const { items } = await this.mGet(index, collection, validIds); + let idx = 0; + 
/** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < validIds.length; i++) { + const validId = validIds[i]; + const item = items[idx]; + if (item && item._id === validId) { + query.ids.values.push(validId); + idx++; + } + else { + partialErrors.push({ + _id: validId, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + // @todo duplicated query to get documents body, mGet here and search in + // deleteByQuery + const { documents } = await this.deleteByQuery(index, collection, query, { + refresh, + }); + return { documents, errors: partialErrors }; + } + /** + * Executes an ES request prepared by mcreate, mupdate, mreplace, mdelete or mwriteDocuments + * Returns a standardized ES response object, containing the list of + * successfully performed operations, and the rejected ones + * + * @param {Object} esRequest - Elasticsearch request + * @param {Object[]} documents - Document sources (format: {_id, _source}) + * @param {Object[]} partialErrors - pre-rejected documents + * @param {Object} options - limits (true) + * + * @returns {Promise.} results + */ + async _mExecute(esRequest, documents, partialErrors = [], { limits = true, source = true } = {}) { + assertWellFormedRefresh(esRequest); + if (this._hasExceededLimit(limits, documents)) { + return kerror.reject("services", "storage", "write_limit_exceeded"); + } + let body = { items: [] }; + if (documents.length > 0) { + try { + body = await this._client.bulk(esRequest); + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + const successes = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < body.items.length; i++) { + const item = body.items[i]; + const result = item[Object.keys(item)[0]]; + if (result.status >= 400) { + if (result.status === 404) { + partialErrors.push({ + document: { + _id: documents[i]._id, + body: documents[i]._source, + }, + reason: "document not found", + status: result.status, + }); + } + else { + partialErrors.push({ + document: documents[i], + reason: result.error.reason, + status: result.status, + }); + } + } + else { + successes.push({ + _id: result._id, + _source: source ? 
documents[i]._source : undefined, + _version: result._version, + created: result.result === "created", + get: result.get, + result: result.result, + status: result.status, // used by mUpdate to get the full document body + }); + } + } + /* end critical code section */ + return { + errors: partialErrors, // @todo rename items to documents + items: successes, + }; + } + /** + * Extracts, injects metadata and validates documents contained + * in a Request + * + * Used by mCreate, mUpdate, mUpsert, mReplace and mCreateOrReplace + * + * @param {Object[]} documents - Documents + * @param {Object} metadata - Kuzzle metadata + * @param {Object} options - prepareMGet (false), requireId (false) + * + * @returns {Object} { rejected, extractedDocuments, documentsToGet } + */ + _extractMDocuments(documents, metadata, { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}) { + const rejected = []; + const extractedDocuments = []; + const documentsToGet = []; + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < documents.length; i++) { + const document = documents[i]; + if (!(0, safeObject_1.isPlainObject)(document.body) && !prepareMUpsert) { + rejected.push({ + document, + reason: "document body must be an object", + status: 400, + }); + } + else if (!(0, safeObject_1.isPlainObject)(document.changes) && prepareMUpsert) { + rejected.push({ + document, + reason: "document changes must be an object", + status: 400, + }); + } + else if (prepareMUpsert && + document.default && + !(0, safeObject_1.isPlainObject)(document.default)) { + rejected.push({ + document, + reason: "document default must be an object", + status: 400, + }); + } + else if (requireId && typeof document._id !== "string") { + rejected.push({ + document, + reason: "document _id must be a string", + status: 400, + }); + } + else { + this._processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet); + } + } + /* end critical code section */ + return { documentsToGet, extractedDocuments, rejected }; + } + _hasExceededLimit(limits, documents) { + return (limits && + documents.length > global.kuzzle.config.limits.documentsWriteCount); + } + _processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet) { + let extractedDocument; + if (prepareMUpsert) { + extractedDocument = { + _source: { + // Do not use destructuring, it's 10x slower + changes: Object.assign({}, metadata.doc, document.changes), + default: Object.assign({}, metadata.upsert, document.changes, document.default), + }, + }; + } + else { + extractedDocument = { + // Do not use destructuring, it's 10x slower + _source: Object.assign({}, metadata, document.body), + }; + } + if (document._id) { + extractedDocument._id = document._id; + } + extractedDocuments.push(extractedDocument); + if (prepareMGet && typeof document._id === "string") { + documentsToGet.push({ + _id: document._id, + _source: false, + }); + } + } + /** + * Throws an error if the provided mapping is invalid + * + * @param {Object} mapping + * @throws + */ + _checkMappings(mapping, path = [], check = true) { + const properties = Object.keys(mapping); + const mappingProperties = path.length === 0 + ? 
ROOT_MAPPING_PROPERTIES + : [...ROOT_MAPPING_PROPERTIES, ...CHILD_MAPPING_PROPERTIES]; + for (const property of properties) { + if (check && !mappingProperties.includes(property)) { + const currentPath = [...path, property].join("."); + throw kerror.get("services", "storage", "invalid_mapping", currentPath, (0, didYouMean_1.default)(property, mappingProperties)); + } + if (property === "properties") { + // type definition level, we don't check + this._checkMappings(mapping[property], [...path, "properties"], false); + } + else if (mapping[property]?.properties) { + // root properties level, check for "properties", "dynamic" and "_meta" + this._checkMappings(mapping[property], [...path, property], true); + } + } + } + /** + * Given index + collection, returns the associated alias name. + * Prefer this function to `_getIndice` and `_getAvailableIndice` whenever it is possible. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Alias name (eg: '@&nepali.liia') + */ + _getAlias(index, collection) { + return `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + } + /** + * Given an alias name, returns the associated index name. + */ + async _checkIfAliasExists(aliasName) { + return this._client.indices.existsAlias({ + name: aliasName, + }); + } + /** + * Given index + collection, returns the associated indice name. + * Use this function if ES does not accept aliases in the request. Otherwise use `_getAlias`. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Indice name (eg: '&nepali.liia') + * @throws If there is not exactly one indice associated + */ + async _getIndice(index, collection) { + const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + const body = await this._client.cat.aliases({ + format: "json", + name: alias, + }); + if (body.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } + else if (body.length > 1) { + throw kerror.get("services", "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, '"indices"'); + } + return body[0].index; + } + /** + * Given an ES Request returns the settings of the corresponding indice. + * + * @param esRequest the ES Request with wanted settings. + * @return {Promise<*>} the settings of the indice. + * @private + */ + async _getSettings(esRequest) { + const response = await this._client.indices.getSettings(esRequest); + const index = esRequest.index; + return response[index].settings; + } + /** + * Given index + collection, returns an available indice name. + * Use this function when creating the associated indice. Otherwise use `_getAlias`. 
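+ * If the base name is already taken, a random numeric suffix is appended (truncating the name if needed to stay under the 255-byte limit) until an available name is found.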
+ * + * @param {String} index + * @param {String} collection + * + * @returns {String} Available indice name (eg: '&nepali.liia2') + */ + async _getAvailableIndice(index, collection) { + let indice = this._getAlias(index, collection).substring(INDEX_PREFIX_POSITION_IN_ALIAS); + if (!(await this._client.indices.exists({ index: indice }))) { + return indice; + } + let notAvailable; + let suffix; + do { + suffix = `.${(0, name_generator_1.randomNumber)(100000)}`; + const overflow = Buffer.from(indice + suffix).length - 255; + if (overflow > 0) { + const indiceBuffer = Buffer.from(indice); + indice = indiceBuffer + .subarray(0, indiceBuffer.length - overflow) + .toString(); + } + notAvailable = await this._client.indices.exists({ + index: indice + suffix, + }); + } while (notAvailable); + return indice + suffix; + } + /** + * Given an indice, returns the associated alias name. + * + * @param {String} indice + * + * @returns {String} Alias name (eg: '@&nepali.liia') + * @throws If there is not exactly one alias associated that is prefixed with @ + */ + async _getAliasFromIndice(indice) { + const body = await this._client.indices.getAlias({ index: indice }); + const aliases = Object.keys(body[indice].aliases).filter((alias) => alias.startsWith(ALIAS_PREFIX)); + if (aliases.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } + return aliases; + } + /** + * Check for each indice whether it has an alias or not. + * When the latter is missing, create one based on the indice name. + * + * This check avoids a breaking change for those who were using Kuzzle before + * alias attribution for each indice turned into a standard (appear in 2.14.0). + */ + async generateMissingAliases() { + try { + const body = await this._client.cat.indices({ format: "json" }); + const indices = body.map(({ index: indice }) => indice); + const aliases = await this.listAliases(); + const indicesWithoutAlias = indices.filter((indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && + !aliases.some((alias) => alias.indice === indice)); + const esRequest = { body: { actions: [] } }; + for (const indice of indicesWithoutAlias) { + esRequest.body.actions.push({ + add: { alias: `${ALIAS_PREFIX}${indice}`, index: indice }, + }); + } + if (esRequest.body.actions.length > 0) { + await this._client.indices.updateAliases(esRequest); + } + } + catch (error) { + throw this._esWrapper.formatESError(error); + } + } + /** + * Throws if index or collection includes forbidden characters + * + * @param {String} index + * @param {String} collection + */ + _assertValidIndexAndCollection(index, collection = null) { + if (!this.isIndexNameValid(index)) { + throw kerror.get("services", "storage", "invalid_index_name", index); + } + if (collection !== null && !this.isCollectionNameValid(collection)) { + throw kerror.get("services", "storage", "invalid_collection_name", collection); + } + } + /** + * Given an alias, extract the associated index. + * + * @param {String} alias + * + * @returns {String} Index name + */ + _extractIndex(alias) { + return alias.substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1); + } + /** + * Given an alias, extract the associated collection. 
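+ * Everything after the first name separator is considered to be the collection name.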
+ * + * @param {String} alias + * + * @returns {String} Collection name + */ + _extractCollection(alias) { + const separatorPos = alias.indexOf(NAME_SEPARATOR); + return alias.substr(separatorPos + 1, alias.length); + } + /** + * Given aliases, extract indexes and collections. + * + * @param {Array.} aliases + * @param {Object.Boolean} includeHidden Only refers to `HIDDEN_COLLECTION` occurences. An empty index will still be listed. Default to `false`. + * + * @returns {Object.} Indexes as key and an array of their collections as value + */ + _extractSchema(aliases, { includeHidden = false } = {}) { + const schema = {}; + for (const alias of aliases) { + const [indexName, collectionName] = alias + .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) + .split(NAME_SEPARATOR); + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix && + (collectionName !== HIDDEN_COLLECTION || includeHidden)) { + if (!schema[indexName]) { + schema[indexName] = []; + } + if (!schema[indexName].includes(collectionName)) { + schema[indexName].push(collectionName); + } + } + } + return schema; + } + /** + * Creates the hidden collection on the provided index if it does not already + * exists + * + * @param {String} index Index name + */ + async _createHiddenCollection(index) { + const mutex = new mutex_1.Mutex(`hiddenCollection/${index}`); + try { + await mutex.lock(); + if (await this._hasHiddenCollection(index)) { + return; + } + const esRequest = { + aliases: { + [this._getAlias(index, HIDDEN_COLLECTION)]: {}, + }, + index: await this._getAvailableIndice(index, HIDDEN_COLLECTION), + settings: { + number_of_replicas: this._config.defaultSettings.number_of_replicas, + number_of_shards: this._config.defaultSettings.number_of_shards, + }, + wait_for_active_shards: await this._getWaitForActiveShards(), + }; + await this._client.indices.create(esRequest); + } + catch (e) { + throw this._esWrapper.formatESError(e); + } + finally { + await mutex.unlock(); + } + } + /** + * We need to always wait for a minimal number of shards to be available + * before answering to the client. This is to avoid Elasticsearch node + * to return a 404 Not Found error when the client tries to index a + * document in the index. + * To find the best value for this setting, we need to take into account + * the number of nodes in the cluster and the number of shards per index. 
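+ * In practice: a single-node cluster waits for 1 active shard, while a multi-node cluster waits for "all" of them.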
+ */ + async _getWaitForActiveShards() { + const body = await this._client.cat.nodes({ format: "json" }); + const numberOfNodes = body.length; + if (numberOfNodes > 1) { + return "all"; + } + return 1; + } + /** + * Scroll indice in elasticsearch and return all document that match the filter + * /!\ throws a write_limit_exceed error: this method is intended to be used + * by deleteByQuery and updateByQuery + * + * @param {Object} esRequest - Search request body + * + * @returns {Promise.} resolve to an array of documents + */ + async _getAllDocumentsFromQuery(esRequest) { + let { hits, _scroll_id } = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(hits); + if (totalHitsValue > global.kuzzle.config.limits.documentsWriteCount) { + throw kerror.get("services", "storage", "write_limit_exceeded"); + } + let documents = hits.hits.map((h) => ({ + _id: h._id, + _source: h._source, + body: {}, + })); + while (totalHitsValue !== documents.length) { + ({ hits, _scroll_id } = await this._client.scroll({ + scroll: esRequest.scroll, + scroll_id: _scroll_id, + })); + documents = documents.concat(hits.hits.map((h) => ({ + _id: h._id, + _source: h._source, + body: {}, + }))); + } + await this.clearScroll(_scroll_id); + return documents; + } + /** + * Clean and normalize the searchBody + * Ensure only allowed parameters are passed to ES + * + * @param {Object} searchBody - ES search body (with query, aggregations, sort, etc) + */ + _sanitizeSearchBody(searchBody) { + // Only allow a whitelist of top level properties + for (const key of Object.keys(searchBody)) { + if (searchBody[key] !== undefined && !this.searchBodyKeys.includes(key)) { + throw kerror.get("services", "storage", "invalid_search_query", key); + } + } + // Ensure that the body does not include a script + this._scriptCheck(searchBody); + // Avoid empty queries that causes ES to respond with an error. + // Empty queries are turned into match_all queries + if (lodash_1.default.isEmpty(searchBody.query)) { + searchBody.query = { match_all: {} }; + } + return searchBody; + } + /** + * Throw if a script is used in the query. + * + * Only Stored Scripts are accepted + * + * @param {Object} object + */ + _scriptCheck(object) { + for (const [key, value] of Object.entries(object)) { + if (this.scriptKeys.includes(key)) { + for (const scriptArg of Object.keys(value)) { + if (!this.scriptAllowedArgs.includes(scriptArg)) { + throw kerror.get("services", "storage", "invalid_query_keyword", `${key}.${scriptArg}`); + } + } + } + // Every object must be checked here, even the ones nested into an array + else if (typeof value === "object" && value !== null) { + this._scriptCheck(value); + } + } + } + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isCollectionNameValid(name) { + return _isObjectNameValid(name); + } + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isIndexNameValid(name) { + return _isObjectNameValid(name); + } + /** + * Clears an allocated scroll + * @param {[type]} id [description] + * @returns {[type]} [description] + */ + async clearScroll(id) { + if (id) { + (0, debug_1.default)("clearing scroll: %s", id); + await this._client.clearScroll({ scroll_id: id }); + } + } + /** + * Loads a configuration value from services.storageEngine and assert a valid + * ms format. 
+ * + * @param {String} key - relative path to the key in configuration + * + * @returns {Number} milliseconds + */ + _loadMsConfig(key) { + const configValue = lodash_1.default.get(this._config, key); + (0, assert_1.default)(typeof configValue === "string", `services.storageEngine.${key} must be a string.`); + const parsedValue = (0, ms_1.default)(configValue); + (0, assert_1.default)(typeof parsedValue === "number", `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`); + return parsedValue; + } + /** + * Returns true if one of the mappings dynamic property changes value from + * false to true + */ + _dynamicChanges(previousMappings, newMappings) { + const previousValues = findDynamic(previousMappings); + for (const [path, previousValue] of Object.entries(previousValues)) { + if (previousValue.toString() !== "false") { + continue; + } + const newValue = lodash_1.default.get(newMappings, path); + if (newValue && newValue.toString() !== "false") { + return true; + } + } + return false; + } + async waitForElasticsearch() { + if (esState !== esStateEnum.NONE) { + while (esState !== esStateEnum.OK) { + await bluebird_1.default.delay(1000); + } + return; + } + esState = esStateEnum.AWAITING; + global.kuzzle.log.info("[ℹ] Trying to connect to Elasticsearch..."); + while (esState !== esStateEnum.OK) { + try { + // Wait for at least 1 shard to be initialized + const health = await this._client.cluster.health({ + wait_for_no_initializing_shards: true, + }); + if (health.number_of_pending_tasks === 0) { + global.kuzzle.log.info("[✔] Elasticsearch is ready"); + esState = esStateEnum.OK; + } + else { + global.kuzzle.log.info(`[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining`); + await bluebird_1.default.delay(1000); + } + } + catch (e) { + await bluebird_1.default.delay(1000); + } + } + } + /** + * Checks if the dynamic properties are correct + */ + _checkDynamicProperty(mappings) { + const dynamicProperties = findDynamic(mappings); + for (const [path, value] of Object.entries(dynamicProperties)) { + // Prevent common mistake + if (typeof value === "boolean") { + lodash_1.default.set(mappings, path, value.toString()); + } + else if (typeof value !== "string") { + throw kerror.get("services", "storage", "invalid_mapping", path, "Dynamic property value should be a string."); + } + if (!DYNAMIC_PROPERTY_VALUES.includes(value.toString())) { + throw kerror.get("services", "storage", "invalid_mapping", path, `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join('", "')}"`); + } + } + } + _setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta) { + /** + * @warning Critical code section + * + * bulk body can contain more than 10K elements + */ + let lastAction = ""; + const actionNames = ["index", "create", "update", "delete"]; + for (let i = 0; i < esRequest.operations.length; i++) { + const item = esRequest.operations[i]; + const action = Object.keys(item)[0]; + if (actionNames.indexOf(action) !== -1) { + lastAction = action; + item[action]._index = alias; + if (item[action]?._type) { + item[action]._type = undefined; + } + } + else if (lastAction === "index" || lastAction === "create") { + item._kuzzle_info = kuzzleMeta.created; + } + else if (lastAction === "update") { + this._setLastActionToKuzzleMetaUpdate(item, kuzzleMeta); + } + } + /* end critical code section */ + } + _setLastActionToKuzzleMetaUpdate(item, kuzzleMeta) { + for (const prop of ["doc", "upsert"]) { + if ((0, safeObject_1.isPlainObject)(item[prop])) { + item[prop]._kuzzle_info = kuzzleMeta.updated; + } + } + } + _getHitsTotalValue(hits) { + if (typeof hits.total === "number") { + return hits.total; + } + return hits.total.value; + } +} +exports.ES8 = ES8; +/** + * Finds paths and values of mappings dynamic properties + * + * @example + * + * findDynamic(mappings); + * { + * "properties.metadata.dynamic": "true", + * "properties.user.properties.address.dynamic": "strict" + * } + */ +function findDynamic(mappings, path = [], results = {}) { + if (mappings.dynamic !== undefined) { + results[path.concat("dynamic").join(".")] = mappings.dynamic; + } + for (const [key, value] of Object.entries(mappings)) { + if ((0, safeObject_1.isPlainObject)(value)) { + findDynamic(value, path.concat(key), results); + } + } + return results; +} +/** + * Forbids the use of the _routing ES option + * + * @param {Object} esRequest + * @throws + */ +function assertNoRouting(esRequest) { + if (esRequest._routing) { + throw kerror.get("services", "storage", "no_routing"); + } +} +/** + * Checks if the optional "refresh" argument is well-formed + * + * @param {Object} esRequest + * @throws + */ +function assertWellFormedRefresh(esRequest) { + if (!["wait_for", "false", false, undefined].includes(esRequest.refresh)) { + throw kerror.get("services", "storage", "invalid_argument", "refresh", '"wait_for", false'); + } +} +function getKuid(userId) { + if (!userId) { + return null; + } + return String(userId); +} +/** + * Checks if an index or collection name is valid + * + * @see https://www.elastic.co/guide/en/elasticsearch/reference/7.4/indices-create-index.html + * + * Beware of the length check: ES allows indice names up to 255 bytes, but since + * in Kuzzle we emulate collections as indices, we have to make sure + * that the privacy prefix, the index name, the separator and the collection + * name ALL fit within the 255-bytes limit of Elasticsearch. 
The simplest way + * is to limit index and collection names to 126 bytes and document that + * limitation (prefix(1) + index(1..126) + sep(1) + collection(1..126) = 4..254) + * + * @param {string} name + * @returns {Boolean} + */ +function _isObjectNameValid(name) { + if (typeof name !== "string" || name.length === 0) { + return false; + } + if (name.toLowerCase() !== name) { + return false; + } + if (Buffer.from(name).length > 126) { + return false; + } + if (name === "_all") { + return false; + } + let valid = true; + for (let i = 0; valid && i < FORBIDDEN_CHARS.length; i++) { + valid = !name.includes(FORBIDDEN_CHARS[i]); + } + return valid; +} +//# sourceMappingURL=elasticsearch.js.map \ No newline at end of file diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts new file mode 100644 index 0000000000..a91152cc4e --- /dev/null +++ b/lib/service/storage/8/elasticsearch.ts @@ -0,0 +1,3844 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import _ from "lodash"; + +import { Client, estypes } from "sdk-es8"; + +import { + InfoResult, + JSONObject, + KImportError, + KRequestBody, + KRequestParams, + KStats, + KStatsIndexes, + KUpdateResponse, +} from "../../../types/storage/8/Elasticsearch"; + +import assert from "assert"; + +import ms from "ms"; +import Bluebird from "bluebird"; +import semver from "semver"; + +import debug from "../../../util/debug"; +import ESWrapper from "./esWrapper"; +import QueryTranslator from "../commons/queryTranslator"; +import didYouMean from "../../../util/didYouMean"; +import * as kerror from "../../../kerror"; +import { assertIsObject } from "../../../util/requestAssertions"; +import { isPlainObject } from "../../../util/safeObject"; +import scopeEnum from "../../../core/storage/storeScopeEnum"; +import extractFields from "../../../util/extractFields"; +import { Mutex } from "../../../util/mutex"; +import { randomNumber } from "../../../util/name-generator"; + +debug("kuzzle:services:elasticsearch"); + +const SCROLL_CACHE_PREFIX = "_docscroll_"; + +const ROOT_MAPPING_PROPERTIES = [ + "properties", + "_meta", + "dynamic", + "dynamic_templates", +]; +const CHILD_MAPPING_PROPERTIES = ["type"]; + +// Used for collection emulation +const HIDDEN_COLLECTION = "_kuzzle_keep"; +const ALIAS_PREFIX = "@"; // @todo next major release: Add ALIAS_PREFIX in FORBIDDEN_CHARS +const PRIVATE_PREFIX = "%"; +const PUBLIC_PREFIX = "&"; +const INDEX_PREFIX_POSITION_IN_INDICE = 0; +const INDEX_PREFIX_POSITION_IN_ALIAS = 1; +const NAME_SEPARATOR = "."; +const FORBIDDEN_CHARS = `\\/*?"<>| \t\r\n,+#:${NAME_SEPARATOR}${PUBLIC_PREFIX}${PRIVATE_PREFIX}`; +const DYNAMIC_PROPERTY_VALUES = ["true", "false", "strict"]; + +// used to check whether we need to wait for ES to initialize or not +enum esStateEnum { + AWAITING = 1, + NONE = 2, + OK = 3, +} 
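+
+// Illustrative example of the naming scheme derived from the constants above
+// (the index and collection names are hypothetical): for the public index
+// "nyc-open-data" and the collection "yellow-taxi", the backing Elasticsearch
+// indice is "&nyc-open-data.yellow-taxi" and the alias handled by Kuzzle is
+// "@&nyc-open-data.yellow-taxi"; a private store would use "%" instead of "&".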
+ +let esState = esStateEnum.NONE; + +/** + * @param {Kuzzle} kuzzle kuzzle instance + * @param {Object} config Service configuration + * @param {storeScopeEnum} scope + * @constructor + */ +export class ES8 { + public _client: Client; + public _scope: scopeEnum; + public _indexPrefix: string; + public _esWrapper: ESWrapper; + public _esVersion: any; + public _translator: QueryTranslator; + public searchBodyKeys: string[]; + public scriptKeys: string[]; + public scriptAllowedArgs: string[]; + public maxScrollDuration: number; + public scrollTTL: number; + public _config: any; + + constructor(config: any, scope = scopeEnum.PUBLIC) { + this._config = config; + this._scope = scope; + this._indexPrefix = + scope === scopeEnum.PRIVATE ? PRIVATE_PREFIX : PUBLIC_PREFIX; + + this._client = null; + this._esWrapper = null; + this._esVersion = null; + this._translator = new QueryTranslator(); + + // Allowed root key of a search query + this.searchBodyKeys = [ + "aggregations", + "aggs", + "collapse", + "explain", + "fields", + "from", + "highlight", + "query", + "search_after", + "search_timeout", + "size", + "sort", + "suggest", + "_name", + "_source", + "_source_excludes", + "_source_includes", + ]; + + /** + * Only allow stored-scripts in queries + */ + this.scriptKeys = ["script", "_script"]; + this.scriptAllowedArgs = ["id", "params"]; + + this.maxScrollDuration = this._loadMsConfig("maxScrollDuration"); + + this.scrollTTL = this._loadMsConfig("defaults.scrollTTL"); + } + + get scope() { + return this._scope; + } + + /** + * Initializes the elasticsearch client + * + * @override + * @returns {Promise} + */ + async _initSequence() { + if (this._client) { + return; + } + + if ( + global.NODE_ENV !== "development" && + this._config.commonMapping.dynamic === "true" + ) { + global.kuzzle.log.warn( + [ + "Your dynamic mapping policy is set to 'true' for new fields.", + "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", + 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', + ].join("\n") + ); + } + + this._client = new Client(this._config.client); + + await this.waitForElasticsearch(); + + this._esWrapper = new ESWrapper(this._client); + + const { version } = await this._client.info(); + + if (version && !semver.satisfies(semver.coerce(version.number), "^8.0.0")) { + throw kerror.get( + "services", + "storage", + "version_mismatch", + version.number + ); + } + + this._esVersion = version; + } + + /** + * Translate Koncorde filters to Elasticsearch query + * + * @param {Object} filters - Set of valid Koncorde filters + * @returns {Object} Equivalent Elasticsearch query + */ + translateKoncordeFilters(filters) { + return this._translator.translate(filters); + } + + /** + * Returns some basic information about this service + * @override + * + * @returns {Promise.} service informations + */ + async info(): Promise { + const result: InfoResult = { + type: "elasticsearch", + version: this._esVersion, + }; + + try { + const info = await this._client.info(); + result.version = info.version.number; + result.lucene = info.version.lucene_version; + + const health = await this._client.cluster.health(); + result.status = health.status; + + const stats = await this._client.cluster.stats({ human: true }); + result.spaceUsed = stats.indices.store.size; + result.nodes = stats.nodes; + return result; + } catch (error) { + return this._esWrapper.reject(error); + } + } + + /** + * Returns 
detailed multi-level storage stats data + * + * @returns {Promise.} + */ + async stats(): Promise { + const esRequest: estypes.IndicesStatsRequest = { + metric: ["docs", "store"], + }; + + const stats = await this._client.indices.stats(esRequest); + const indexes: KStatsIndexes = {}; + let size = 0; + + for (const [indice, indiceInfo] of Object.entries(stats.indices)) { + const infos = indiceInfo as any; + // Ignore non-Kuzzle indices + if ( + !indice.startsWith(PRIVATE_PREFIX) && + !indice.startsWith(PUBLIC_PREFIX) + ) { + continue; + } + + const aliases = await this._getAliasFromIndice(indice); + const alias = aliases[0]; + const indexName = this._extractIndex(alias); + const collectionName = this._extractCollection(alias); + + if ( + alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + collectionName === HIDDEN_COLLECTION + ) { + continue; + } + + if (!indexes[indexName]) { + indexes[indexName] = { + collections: [], + name: indexName, + size: 0, + }; + } + + indexes[indexName].collections.push({ + documentCount: infos.total.docs.count, + name: collectionName, + size: infos.total.store.size_in_bytes, + }); + + indexes[indexName].size += infos.total.store.size_in_bytes; + size += infos.total.store.size_in_bytes; + } + + return { + indexes: Object.values(indexes), + size, + }; + } + + /** + * Scrolls results from previous elasticsearch query. + * Automatically clears the scroll context after the last result page has + * been fetched. + * + * @param {String} scrollId - Scroll identifier + * @param {Object} options - scrollTTL (default scrollTTL) + * + * @returns {Promise.<{ scrollId, hits, aggregations, total }>} + */ + async scroll(scrollId: string, { scrollTTL }: { scrollTTL?: string } = {}) { + const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; + const esRequest: estypes.ScrollRequest = { + scroll: _scrollTTL, + scroll_id: scrollId, + }; + + const cacheKey = + SCROLL_CACHE_PREFIX + global.kuzzle.hash(esRequest.scroll_id); + + debug("Scroll: %o", esRequest); + + if (_scrollTTL) { + const scrollDuration = ms(_scrollTTL); + + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get( + "services", + "storage", + "scroll_duration_too_great", + _scrollTTL + ); + } + } + + const stringifiedScrollInfo = await global.kuzzle.ask( + "core:cache:internal:get", + cacheKey + ); + + if (!stringifiedScrollInfo) { + throw kerror.get("services", "storage", "unknown_scroll_id"); + } + + const scrollInfo = JSON.parse(stringifiedScrollInfo); + + try { + const body = await this._client.scroll(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + + scrollInfo.fetched += body.hits.hits.length; + + if (scrollInfo.fetched >= totalHitsValue) { + debug("Last scroll page fetched: deleting scroll %s", body._scroll_id); + await global.kuzzle.ask("core:cache:internal:del", cacheKey); + await this.clearScroll(body._scroll_id); + } else { + await global.kuzzle.ask( + "core:cache:internal:store", + cacheKey, + JSON.stringify(scrollInfo), + { + ttl: ms(_scrollTTL) || this.scrollTTL, + } + ); + } + + const remaining = totalHitsValue - scrollInfo.fetched; + + return await this._formatSearchResult(body, remaining, scrollInfo); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Searches documents from elasticsearch with a query + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) 
+ * @param {Object} options - from (undefined), size (undefined), scroll (undefined) + * + * @returns {Promise.<{ scrollId, hits, aggregations, suggest, total }>} + */ + async search( + { + index, + collection, + searchBody, + targets, + }: { + index?: string; + collection?: string; + searchBody?: JSONObject; + targets?: any[]; + } = {}, + { + from, + size, + scroll, + }: { + from?: number; + size?: number; + scroll?: string; + } = {} + ) { + let esIndexes: any; + + if (targets && targets.length > 0) { + const indexes = new Set(); + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + + indexes.add(alias); + } + } + + esIndexes = Array.from(indexes).join(","); + } else { + esIndexes = this._getAlias(index, collection); + } + + const esRequest: estypes.SearchRequest = { + ...this._sanitizeSearchBody(searchBody), + from, + index: esIndexes, + scroll, + size, + track_total_hits: true, + }; + + if (scroll) { + const scrollDuration = ms(scroll); + + if (scrollDuration > this.maxScrollDuration) { + throw kerror.get( + "services", + "storage", + "scroll_duration_too_great", + scroll + ); + } + } + + debug("Search: %j", esRequest); + + try { + const body = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + let remaining: number; + + if (body._scroll_id) { + const ttl = + (esRequest.scroll && ms(esRequest.scroll)) || + ms(this._config.defaults.scrollTTL); + + await global.kuzzle.ask( + "core:cache:internal:store", + SCROLL_CACHE_PREFIX + global.kuzzle.hash(body._scroll_id), + JSON.stringify({ + collection, + fetched: body.hits.hits.length, + index, + targets, + }), + { ttl } + ); + + remaining = totalHitsValue - body.hits.hits.length; + } + + return await this._formatSearchResult(body, remaining, { + collection, + index, + targets, + }); + } catch (error) { + console.error(error); + throw this._esWrapper.formatESError(error); + } + } + + /** + * Generate a map that associate an alias to a pair of index and collection + * + * @param {*} targets + * @returns + */ + _mapTargetsToAlias(targets) { + const aliasToTargets = {}; + + for (const target of targets) { + for (const targetCollection of target.collections) { + const alias = this._getAlias(target.index, targetCollection); + if (!aliasToTargets[alias]) { + aliasToTargets[alias] = { + collection: targetCollection, + index: target.index, + }; + } + } + } + + return aliasToTargets; + } + + async _formatSearchResult( + body: any, + remaining?: number, + searchInfo: any = {} + ) { + let aliasToTargets = {}; + const aliasCache = new Map(); + + if (searchInfo.targets) { + /** + * We need to map the alias to the target index and collection, + * so we can later retrieve informations about an index & collection + * based on its alias. 
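+ * For example (hypothetical names), the resulting map may look like:
+ *   { "@&nyc-open-data.yellow-taxi": { index: "nyc-open-data", collection: "yellow-taxi" } }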
+ */ + aliasToTargets = this._mapTargetsToAlias(searchInfo.targets); + } + + const formatHit = async (hit) => { + let index = searchInfo.index; + let collection = searchInfo.collection; + + /** + * If the search has been done on multiple targets, we need to + * retrieve the appropriate index and collection based on the alias + */ + if (hit._index && searchInfo.targets) { + // Caching to reduce call to ES + let aliases = aliasCache.get(hit._index); + if (!aliases) { + // Retrieve all the alias associated to one index + aliases = await this._getAliasFromIndice(hit._index); + aliasCache.set(hit._index, aliases); + } + + /** + * Since multiple alias can point to the same index in ES, we need to + * find the first alias that exists in the map of aliases associated + * to the targets. + */ + const alias = aliases.find((_alias) => aliasToTargets[_alias]); + // Retrieve index and collection information based on the matching alias + index = aliasToTargets[alias].index; + collection = aliasToTargets[alias].collection; + } + + return { + _id: hit._id, + _score: hit._score, + _source: hit._source, + collection, + highlight: hit.highlight, + index, + }; + }; + + async function formatInnerHits(innerHits) { + if (!innerHits) { + return undefined; + } + + const formattedInnerHits = {}; + for (const [name, innerHit] of Object.entries(innerHits)) { + formattedInnerHits[name] = await Bluebird.map( + (innerHit as any).hits.hits, + formatHit + ); + } + return formattedInnerHits; + } + + const hits = await Bluebird.map(body.hits.hits, async (hit) => ({ + inner_hits: await formatInnerHits(hit.inner_hits), + ...(await formatHit(hit)), + })); + + return { + aggregations: body.aggregations, + hits, + remaining, + scrollId: body._scroll_id, + suggest: body.suggest, + total: body.hits.total.value, + }; + } + + /** + * Gets the document with given ID + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async get(index, collection, id) { + const esRequest: estypes.GetRequest = { + id, + index: this._getAlias(index, collection), + }; + + // Just in case the user make a GET on url /mainindex/test/_search + // Without this test we return something weird: a result.hits.hits with all + // document without filter because the body is empty in HTTP by default + if (esRequest.id === "_search") { + return kerror.reject("services", "storage", "search_as_an_id"); + } + + debug("Get document: %o", esRequest); + + try { + const body = await this._client.get(esRequest); + + return { + _id: body._id, + _source: body._source, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Returns the list of documents matching the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. 
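+ *
+ * Illustrative usage (assuming `storage` is an instance of this service; names and ids are hypothetical):
+ *   const { items, errors } = await storage.mGet("nyc-open-data", "yellow-taxi", ["id-1", "id-2"]);
+ *   // items:  documents that were found, as { _id, _source, _version }
+ *   // errors: ids that could not be retrieved (e.g. "id-2" if it does not exist)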
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mGet(index: string, collection: string, ids: string[]) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + + const esRequest: estypes.MgetRequest = { + docs: ids.map((_id) => ({ + _id, + _index: this._getAlias(index, collection), + })), + }; + + debug("Multi-get documents: %o", esRequest); + + let body: estypes.MgetResponse>; + + try { + body = await this._client.mget(esRequest); // NOSONAR + } catch (e) { + throw this._esWrapper.formatESError(e); + } + + const errors = []; + const items = []; + + for (const doc of body.docs) { + if (!("error" in doc) && doc.found) { + items.push({ + _id: doc._id, + _source: doc._source, + _version: doc._version, + }); + } else { + errors.push(doc._id); + } + } + + return { errors, items }; + } + + /** + * Counts how many documents match the filter given in body + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} searchBody - Search request body (query, sort, etc.) + * + * @returns {Promise.} count + */ + async count(index: string, collection: string, searchBody = {}) { + const esRequest: estypes.CountRequest = { + ...this._sanitizeSearchBody(searchBody), + index: this._getAlias(index, collection), + }; + + debug("Count: %o", esRequest); + + try { + const body = await this._client.count(esRequest); + return body.count; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends the new document to elasticsearch + * Cleans data to match elasticsearch specifications + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} content - Document content + * @param {Object} options - id (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { _id, _version, _source } + */ + async create( + index: string, + collection: string, + content: JSONObject, + { + id, + refresh, + userId = null, + injectKuzzleMeta = true, + }: { + id?: string; + refresh?: boolean | "wait_for"; + userId?: string; + injectKuzzleMeta?: boolean; + } = {} + ) { + assertIsObject(content); + + const esRequest: estypes.IndexRequest> = { + document: content, + id, + index: this._getAlias(index, collection), + op_type: id ? 
"create" : "index", + refresh, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + // Add metadata + if (injectKuzzleMeta) { + esRequest.document._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }; + } + + debug("Create document: %o", esRequest); + + try { + const body = await this._client.index(esRequest); + + return { + _id: body._id, + _source: esRequest.document, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Creates a new document to Elasticsearch, or replace it if it already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null), injectKuzzleMeta (true) + * + * @returns {Promise.} { _id, _version, _source, created } + */ + async createOrReplace( + index, + collection, + id, + content, + { + refresh, + userId = null, + injectKuzzleMeta = true, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + injectKuzzleMeta?: boolean; + } = {} + ) { + const esRequest: estypes.IndexRequest> = { + document: content, + id, + index: this._getAlias(index, collection), + refresh, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + // Add metadata + if (injectKuzzleMeta) { + esRequest.document._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + + debug("Create or replace document: %o", esRequest); + + try { + const body = await this._client.index(esRequest); + + return { + _id: body._id, + _source: esRequest.document, + _version: body._version, + created: body.result === "created", // Needed by the notifier + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends the partial document to elasticsearch with the id to update + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async update( + index: string, + collection: string, + id: string, + content: JSONObject, + { + refresh, + userId = null, + retryOnConflict, + injectKuzzleMeta = true, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + retryOnConflict?: number; + injectKuzzleMeta?: boolean; + } = {} + ): Promise { + const esRequest: estypes.UpdateRequest< + KRequestBody, + KRequestBody + > = { + _source: true, + doc: content, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + if (injectKuzzleMeta) { + // Add metadata + esRequest.doc._kuzzle_info = { + ...esRequest.doc._kuzzle_info, + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + + debug("Update document: %o", esRequest); + + try { + const body = await this._client.update(esRequest); + + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends the partial document to elasticsearch with the id to update + * Creates the document 
if it doesn't already exist + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Updated content + * @param {Object} options - defaultValues ({}), refresh (undefined), userId (null), retryOnConflict (0) + * + * @returns {Promise.<{ _id, _version }>} + */ + async upsert( + index: string, + collection: string, + id: string, + content: JSONObject, + { + defaultValues = {}, + refresh, + userId = null, + retryOnConflict, + injectKuzzleMeta = true, + }: { + defaultValues?: JSONObject; + refresh?: boolean | "wait_for"; + userId?: string; + retryOnConflict?: number; + injectKuzzleMeta?: boolean; + } = {} + ) { + const esRequest: estypes.UpdateRequest< + KRequestBody, + KRequestBody + > = { + _source: true, + doc: content, + id, + index: this._getAlias(index, collection), + refresh, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + upsert: { ...defaultValues, ...content }, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + // Add metadata + const user = getKuid(userId); + const now = Date.now(); + + if (injectKuzzleMeta) { + esRequest.doc._kuzzle_info = { + ...esRequest.doc._kuzzle_info, + updatedAt: now, + updater: user, + }; + esRequest.upsert._kuzzle_info = { + ...esRequest.upsert._kuzzle_info, + author: user, + createdAt: now, + }; + } + + debug("Upsert document: %o", esRequest); + + try { + const body = await this._client.update(esRequest); + + return { + _id: body._id, + _source: body.get._source, + _version: body._version, + created: body.result === "created", + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Replaces a document to Elasticsearch + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} content - Document content + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async replace( + index: string, + collection: string, + id: string, + content: JSONObject, + { + refresh, + userId = null, + injectKuzzleMeta = true, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + injectKuzzleMeta?: boolean; + } = {} + ) { + const alias = this._getAlias(index, collection); + const esRequest: estypes.IndexRequest> = { + document: content, + id, + index: alias, + refresh, + }; + + assertNoRouting(esRequest); + assertWellFormedRefresh(esRequest); + + if (injectKuzzleMeta) { + // Add metadata + esRequest.document._kuzzle_info = { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: Date.now(), + updater: getKuid(userId), + }; + } + + try { + const exists = await this._client.exists({ id, index: alias }); + + if (!exists) { + throw kerror.get( + "services", + "storage", + "not_found", + id, + index, + collection + ); + } + + debug("Replace document: %o", esRequest); + + const body = await this._client.index(esRequest); + + return { + _id: id, + _source: esRequest.document, + _version: body._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Sends to elasticsearch the document id to delete + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Object} options - refresh (undefined) + * + * @returns {Promise} + */ + async delete( + index: string, + collection: string, + 
id: string, + { + refresh, + }: { + refresh?: boolean | "wait_for"; + } = {} + ) { + const esRequest: estypes.DeleteRequest = { + id, + index: this._getAlias(index, collection), + refresh, + }; + + assertWellFormedRefresh(esRequest); + + debug("Delete document: %o", esRequest); + + try { + await this._client.delete(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + return null; + } + + /** + * Deletes all documents matching the provided filters. + * If fetch=false, the max documents write limit is not applied. + * + * Options: + * - size: size of the batch to retrieve documents (no-op if fetch=false) + * - refresh: refresh option for ES + * - fetch: if true, will fetch the documents before delete them + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} options - size (undefined), refresh (undefined), fetch (true) + * + * @returns {Promise.<{ documents, total, deleted, failures: Array<{ id, reason }> }>} + */ + async deleteByQuery( + index: string, + collection: string, + query: JSONObject, + { + refresh, + size = 1000, + fetch = true, + }: { + refresh?: boolean | "wait_for"; + size?: number; + fetch?: boolean; + } = {} + ) { + const esRequest = { + ...this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + } satisfies estypes.DeleteByQueryRequest | estypes.SearchRequest; + + if (!isPlainObject(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + + try { + let documents = []; + + if (fetch) { + documents = await this._getAllDocumentsFromQuery({ + ...esRequest, + size, + }); + } + + debug("Delete by query: %o", esRequest); + + esRequest.refresh = refresh === "wait_for" ? 
true : refresh; + + const request = { + ...esRequest, + max_docs: size, + }; + + if (request.max_docs === -1) { + request.max_docs = undefined; + } + + const body = await this._client.deleteByQuery(request); + + return { + deleted: body.deleted, + documents, + failures: body.failures.map(({ id, cause }) => ({ + id, + reason: cause.reason, + })), + total: body.total, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Delete fields of a document and replace it + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document id + * @param {Array} fields - Document fields to be removed + * @param {Object} options - refresh (undefined), userId (null) + * + * @returns {Promise.<{ _id, _version, _source }>} + */ + async deleteFields( + index: string, + collection: string, + id: string, + fields: string, + { + refresh, + userId = null, + }: { + refresh?: boolean | "wait_for"; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection); + const esRequest: estypes.GetRequest = { + id, + index: alias, + }; + + try { + debug("DeleteFields document: %o", esRequest); + const body = await this._client.get(esRequest); + + for (const field of fields) { + if (_.has(body._source, field)) { + _.set(body._source, field, undefined); + } + } + + const updatedInfos = { + updatedAt: Date.now(), + updater: getKuid(userId), + }; + + if (typeof body._source._kuzzle_info === "object") { + body._source._kuzzle_info = { + ...body._source._kuzzle_info, + ...updatedInfos, + }; + } else { + body._source._kuzzle_info = updatedInfos; + } + + const newEsRequest: estypes.IndexRequest = { + document: body._source, + id, + index: alias, + refresh, + }; + + assertNoRouting(newEsRequest); + assertWellFormedRefresh(newEsRequest); + + const updated = await this._client.index(newEsRequest); + + return { + _id: id, + _source: body._source, + _version: updated._version, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined), size (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async updateByQuery( + index: string, + collection: string, + query: JSONObject, + changes: JSONObject, + { + refresh, + size = 1000, + userId = null, + }: { + refresh?: boolean | "wait_for"; + size?: number; + userId?: string; + } = {} + ) { + try { + const esRequest: estypes.SearchRequest = { + ...this._sanitizeSearchBody({ query }), + index: this._getAlias(index, collection), + scroll: "5s", + size, + }; + + const documents = await this._getAllDocumentsFromQuery(esRequest); + + for (const document of documents) { + document._source = undefined; + document.body = changes; + } + + debug("Update by query: %o", esRequest); + + const { errors, items } = await this.mUpdate( + index, + collection, + documents, + { refresh, userId } + ); + + return { + errors, + successes: items, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Updates all documents matching the provided filters + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query 
- Query to match documents + * @param {Object} changes - Changes wanted on documents + * @param {Object} options - refresh (undefined) + * + * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} + */ + async bulkUpdateByQuery( + index: string, + collection: string, + query: JSONObject, + changes: JSONObject, + { + refresh = false, + }: { + refresh?: boolean; + } = {} + ) { + const script = { + params: {}, + source: "", + }; + + const flatChanges = extractFields(changes, { alsoExtractValues: true }); + + for (const { key, value } of flatChanges) { + script.source += `ctx._source.${key} = params['${key}'];`; + script.params[key] = value; + } + + const esRequest: estypes.UpdateByQueryRequest = { + index: this._getAlias(index, collection), + query: this._sanitizeSearchBody({ query }).query, + refresh, + script, + }; + + debug("Bulk Update by query: %o", esRequest); + + let response: estypes.UpdateByQueryResponse; + + try { + response = await this._client.updateByQuery(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + if (response.failures.length) { + const errors = response.failures.map(({ id, cause }) => ({ + cause, + id, + })); + + throw kerror.get( + "services", + "storage", + "incomplete_update", + response.updated, + errors + ); + } + + return { + updated: response.updated, + }; + } + + /** + * Execute the callback with a batch of documents of specified size until all + * documents matched by the query have been processed. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} query - Query to match documents + * @param {Function} callback - callback that will be called with the "hits" array + * @param {Object} options - size (10), scrollTTL ('5s') + * + * @returns {Promise.} Array of results returned by the callback + */ + async mExecute( + index: string, + collection: string, + query: JSONObject, + callback: any, + { + size = 10, + scrollTTl = "5s", + }: { + size?: number; + scrollTTl?: string; + } = {} + ): Promise { + const esRequest: estypes.SearchRequest = { + ...this._sanitizeSearchBody({ query }), + from: 0, + index: this._getAlias(index, collection), + scroll: scrollTTl, + size, + }; + + if (!isPlainObject(query)) { + throw kerror.get("services", "storage", "missing_argument", "body.query"); + } + + const results = []; + let processed = 0; + let scrollId = null; + + try { + let body = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(body.hits); + + while (processed < totalHitsValue && body.hits.hits.length > 0) { + scrollId = body._scroll_id; + results.push(await callback(body.hits.hits)); + processed += body.hits.hits.length; + + body = await this._client.scroll({ + scroll: esRequest.scroll, + scroll_id: scrollId, + }); + } + } finally { + await this.clearScroll(scrollId); + } + + return results; + } + + /** + * Creates a new index. + * + * This methods creates an hidden collection in the provided index to be + * able to list it. + * This methods resolves if the index name does not already exists either as + * private or public index. 
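+ *
+ * Illustrative usage (assuming `storage` is an instance of this service; the index name is hypothetical):
+ *   await storage.createIndex("nyc-open-data");
+ *   // the hidden "_kuzzle_keep" collection is created so the empty index can be listed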
+ * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async createIndex(index: string) { + this._assertValidIndexAndCollection(index); + + let body: estypes.CatAliasesResponse; + + try { + body = await this._client.cat.aliases({ format: "json" }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias: name }) => name); + for (const alias of aliases) { + const indexName = this._extractIndex(alias); + + if (index === indexName) { + const indexType = + alias[INDEX_PREFIX_POSITION_IN_ALIAS] === PRIVATE_PREFIX + ? "private" + : "public"; + + throw kerror.get( + "services", + "storage", + "index_already_exists", + indexType, + index + ); + } + } + + await this._createHiddenCollection(index); + + return null; + } + + /** + * Creates an empty collection. + * Mappings and settings will be applied if supplied. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async createCollection( + index: string, + collection: string, + { + mappings = {}, + settings = {}, + }: { + mappings?: estypes.MappingTypeMapping; + settings?: Record; + } = {} + ) { + this._assertValidIndexAndCollection(index, collection); + + if (collection === HIDDEN_COLLECTION) { + throw kerror.get( + "services", + "storage", + "collection_reserved", + HIDDEN_COLLECTION + ); + } + + const mutex = new Mutex(`hiddenCollection/create/${index}`); + try { + await mutex.lock(); + + if (await this._hasHiddenCollection(index)) { + await this.deleteCollection(index, HIDDEN_COLLECTION); + } + } catch (error) { + throw this._esWrapper.formatESError(error); + } finally { + await mutex.unlock(); + } + + const esRequest: estypes.IndicesCreateRequest = { + aliases: { + [this._getAlias(index, collection)]: {}, + }, + index: await this._getAvailableIndice(index, collection), + mappings: {}, + settings, + wait_for_active_shards: await this._getWaitForActiveShards(), + }; + + this._checkDynamicProperty(mappings); + + const exists = await this.hasCollection(index, collection); + if (exists) { + return this.updateCollection(index, collection, { mappings, settings }); + } + + this._checkMappings(mappings); + + esRequest.mappings = { + _meta: mappings._meta || this._config.commonMapping._meta, + dynamic: mappings.dynamic || this._config.commonMapping.dynamic, + properties: _.merge( + mappings.properties, + this._config.commonMapping.properties + ), + }; + + esRequest.settings.number_of_replicas = + esRequest.settings.number_of_replicas || + this._config.defaultSettings.number_of_replicas; + + esRequest.settings.number_of_shards = + esRequest.settings.number_of_shards || + this._config.defaultSettings.number_of_shards; + + try { + await this._client.indices.create(esRequest); + } catch (error) { + if ( + _.get(error, "meta.body.error.type") === + "resource_already_exists_exception" + ) { + // race condition: the indice has been created between the "exists" + // check above and this "create" attempt + return null; + } + + throw this._esWrapper.formatESError(error); + } + + return null; + } + + /** + * Retrieves settings definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.<{ settings }>} + */ + async getSettings(index: string, collection: string) { + const indice = await this._getIndice(index, collection); + const esRequest: estypes.IndicesGetSettingsRequest = { + 
index: indice, + }; + + debug("Get settings: %o", esRequest); + + try { + const body = await this._client.indices.getSettings(esRequest); + + return body[indice].settings.index; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Retrieves mapping definition for index/type + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} options - includeKuzzleMeta (false) + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async getMapping( + index: string, + collection: string, + { + includeKuzzleMeta = false, + }: { + includeKuzzleMeta?: boolean; + } = {} + ) { + const indice = await this._getIndice(index, collection); + const esRequest: estypes.IndicesGetMappingRequest = { + index: indice, + }; + + debug("Get mapping: %o", esRequest); + + try { + const body = await this._client.indices.getMapping(esRequest); + + const properties = includeKuzzleMeta + ? body[indice].mappings.properties + : _.omit(body[indice].mappings.properties, "_kuzzle_info"); + + return { + _meta: body[indice].mappings._meta, + dynamic: body[indice].mappings.dynamic, + properties, + }; + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Updates a collection mappings and settings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} config - mappings ({}), settings ({}) + * + * @returns {Promise} + */ + async updateCollection( + index: string, + collection: string, + { + mappings = {}, + settings = {}, + }: { + mappings?: estypes.MappingTypeMapping; + settings?: Record; + } = {} + ) { + const esRequest: estypes.IndicesGetSettingsRequest = { + index: await this._getIndice(index, collection), + }; + + // If either the putMappings or the putSettings operation fail, we need to + // rollback the whole operation. Since mappings can't be rollback, we try to + // update the settings first, then the mappings and we rollback the settings + // if putMappings fail. + let indexSettings; + + try { + indexSettings = await this._getSettings(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + if (!_.isEmpty(settings)) { + await this.updateSettings(index, collection, settings); + } + + try { + if (!_.isEmpty(mappings)) { + const previousMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + + await this.updateMapping(index, collection, mappings); + + if (this._dynamicChanges(previousMappings, mappings)) { + await this.updateSearchIndex(index, collection); + } + } + } catch (error) { + const allowedSettings = this.getAllowedIndexSettings(indexSettings); + + // Rollback to previous settings + if (!_.isEmpty(settings)) { + await this.updateSettings(index, collection, allowedSettings); + } + + throw error; + } + + return null; + } + + /** + * Given index settings we return a new version of index settings + * only with allowed settings that can be set (during update or create index). + * @param indexSettings the index settings + * @returns {{index: *}} a new index settings with only allowed settings. 
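+ * For example (hypothetical values):
+ *   { index: { number_of_replicas: "1", uuid: "aBc123", creation_date: "1700000000000" } }
+ *   becomes { index: { number_of_replicas: "1" } }, since "creation_date", "provided_name",
+ *   "uuid" and "version" cannot be reapplied when creating or updating an index.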
+ */ + getAllowedIndexSettings(indexSettings) { + return { + index: _.omit(indexSettings.index, [ + "creation_date", + "provided_name", + "uuid", + "version", + ]), + }; + } + + /** + * Sends an empty UpdateByQuery request to update the search index + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @returns {Promise.} {} + */ + async updateSearchIndex(index: string, collection: string) { + const esRequest: estypes.UpdateByQueryRequest = { + // @cluster: conflicts when two nodes start at the same time + conflicts: "proceed", + index: this._getAlias(index, collection), + refresh: true, + // This operation can take some time: this should be an ES + // background task. And it's preferable to a request timeout when + // processing large indexes. + wait_for_completion: false, + }; + + debug("UpdateByQuery: %o", esRequest); + + try { + await this._client.updateByQuery(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Update a collection mappings + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} mappings - Collection mappings in ES format + * + * @returns {Promise.<{ dynamic, _meta, properties }>} + */ + async updateMapping( + index: string, + collection: string, + mappings: estypes.MappingTypeMapping = {} + ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { + let esRequest: estypes.IndicesPutMappingRequest = { + index: this._getAlias(index, collection), + }; + + this._checkDynamicProperty(mappings); + + const collectionMappings = await this.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + + this._checkMappings(mappings); + + esRequest = { + ...esRequest, + _meta: mappings._meta || collectionMappings._meta, + dynamic: mappings.dynamic || collectionMappings.dynamic, + properties: mappings.properties, + }; + + debug("Update mapping: %o", esRequest); + + try { + await this._client.indices.putMapping(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const fullProperties = _.merge( + collectionMappings.properties, + mappings.properties + ); + + return { + _meta: esRequest._meta, + dynamic: esRequest.dynamic.toString(), + properties: fullProperties, + }; + } + + /** + * Updates a collection settings (eg: analyzers) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object} settings - Collection settings in ES format + * + * @returns {Promise} + */ + async updateSettings(index, collection, settings = {}) { + const esRequest = { + index: this._getAlias(index, collection), + }; + + await this._client.indices.close(esRequest); + + try { + await this._client.indices.putSettings({ ...esRequest, body: settings }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } finally { + await this._client.indices.open(esRequest); + } + + return null; + } + + /** + * Empties the content of a collection. Keep the existing mapping and settings. 
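+   * (Implementation note: the underlying indice is deleted and then re-created with the
+   * previous mappings, settings and alias, as shown in the code below.)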
+   *
+   * @param {String} index - Index name
+   * @param {String} collection - Collection name
+   *
+   * @returns {Promise}
+   */
+  async truncateCollection(index: string, collection: string) {
+    let mappings;
+    let settings;
+
+    const esRequest = {
+      index: await this._getIndice(index, collection),
+    };
+
+    try {
+      mappings = await this.getMapping(index, collection, {
+        includeKuzzleMeta: true,
+      });
+      settings = await this._getSettings(esRequest);
+      settings = {
+        ...settings,
+        ...this.getAllowedIndexSettings(settings),
+      };
+      await this._client.indices.delete(esRequest);
+
+      await this._client.indices.create({
+        ...esRequest,
+        aliases: {
+          [this._getAlias(index, collection)]: {},
+        },
+        mappings,
+        settings,
+        wait_for_active_shards: await this._getWaitForActiveShards(),
+      });
+
+      return null;
+    } catch (error) {
+      throw this._esWrapper.formatESError(error);
+    }
+  }
+
+  /**
+   * Runs several actions on several documents at once (bulk import)
+   *
+   * @param {String} index - Index name
+   * @param {String} collection - Collection name
+   * @param {Object[]} documents - Documents to import
+   * @param {Object} options - timeout (undefined), refresh (undefined), userId (null)
+   *
+   * @returns {Promise.<{ items, errors }>}
+   */
+  async import(
+    index: string,
+    collection: string,
+    documents: JSONObject[],
+    {
+      refresh,
+      timeout,
+      userId = null,
+    }: {
+      refresh?: boolean | "wait_for";
+      timeout?: string;
+      userId?: string;
+    } = {}
+  ) {
+    const alias = this._getAlias(index, collection);
+    const dateNow = Date.now();
+    const esRequest: estypes.BulkRequest = {
+      operations: documents,
+      refresh,
+      timeout,
+    };
+
+    const kuzzleMeta = {
+      created: {
+        author: getKuid(userId),
+        createdAt: dateNow,
+        updatedAt: null,
+        updater: null,
+      },
+      updated: {
+        updatedAt: dateNow,
+        updater: getKuid(userId),
+      },
+    };
+
+    assertWellFormedRefresh(esRequest);
+    this._scriptCheck(documents);
+
+    this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta);
+
+    let body: estypes.BulkResponse;
+
+    try {
+      body = await this._client.bulk(esRequest);
+    } catch (error) {
+      throw this._esWrapper.formatESError(error);
+    }
+
+    const result = {
+      errors: [],
+      items: [],
+    };
+
+    let idx = 0;
+
+    /**
+     * @warning Critical code section
+     *
+     * bulk body can contain more than 10K elements
+     */
+    for (let i = 0; i < body.items.length; i++) {
+      const row = body.items[i];
+      const action = Object.keys(row)[0];
+      const item = row[action];
+
+      if (item.status >= 400) {
+        const error: KImportError = {
+          _id: item._id,
+          status: item.status,
+        };
+
+        // the update action contains its body in the "doc" field
+        // the delete action is not followed by an action payload
+        if (action === "update") {
+          error._source = documents[idx + 1].doc;
+          error._source._kuzzle_info = undefined;
+        } else if (action !== "delete") {
+          error._source = documents[idx + 1];
+          error._source._kuzzle_info = undefined;
+        }
+
+        // ES response does not systematically include an error object
+        // (e.g. delete action with 404 status)
+        if (item.error) {
+          error.error = {
+            reason: item.error.reason,
+            type: item.error.type,
+          };
+        }
+
+        result.errors.push({ [action]: error });
+      } else {
+        result.items.push({
+          [action]: {
+            _id: item._id,
+            status: item.status,
+          },
+        });
+      }
+
+      // the delete action is not followed by an action payload
+      idx = action === "delete" ?
idx + 1 : idx + 2; + } + /* end critical code section */ + + return result; + } + + /** + * Retrieves the complete list of existing collections in the current index + * + * @param {String} index - Index name + * @param {Object.Boolean} includeHidden - Optional: include HIDDEN_COLLECTION in results + * + * @returns {Promise.} Collection names + */ + async listCollections(index, { includeHidden = false } = {}) { + let body: estypes.CatAliasesResponse; + + try { + body = await this._client.cat.aliases({ format: "json" }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias }) => alias); + + const schema = this._extractSchema(aliases, { includeHidden }); + + return schema[index] || []; + } + + /** + * Retrieves the complete list of indexes + * + * @returns {Promise.} Index names + */ + async listIndexes() { + let body: estypes.CatAliasesResponse; + + try { + body = await this._client.cat.aliases({ format: "json" }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias }) => alias); + + const schema = this._extractSchema(aliases); + + return Object.keys(schema); + } + + /** + * Returns an object containing the list of indexes and collections + * + * @returns {Object.} Object + */ + async getSchema() { + let body: estypes.CatAliasesResponse; + + try { + body = await this._client.cat.aliases({ format: "json" }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = body.map(({ alias }) => alias); + + const schema = this._extractSchema(aliases, { includeHidden: true }); + + for (const [index, collections] of Object.entries(schema)) { + schema[index] = (collections as string[]).filter( + (c) => c !== HIDDEN_COLLECTION + ); + } + + return schema; + } + + /** + * Retrieves the complete list of aliases + * + * @returns {Promise.} [ { alias, index, collection, indice } ] + */ + async listAliases() { + let body: estypes.CatAliasesResponse; + + try { + body = await this._client.cat.aliases({ format: "json" }); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + const aliases = []; + + for (const { alias, index: indice } of body) { + if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix) { + aliases.push({ + alias, + collection: this._extractCollection(alias), + index: this._extractIndex(alias), + indice, + }); + } + } + return aliases; + } + + /** + * Deletes a collection + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise} + */ + async deleteCollection(index: string, collection: string): Promise { + const indice = await this._getIndice(index, collection); + const esRequest: estypes.IndicesDeleteRequest = { + index: indice, + }; + + try { + await this._client.indices.delete(esRequest); + const alias = this._getAlias(index, collection); + + if (await this._checkIfAliasExists(alias)) { + await this._client.indices.deleteAlias({ + index: indice, + name: alias, + }); + } + + await this._createHiddenCollection(index); + } catch (e) { + throw this._esWrapper.formatESError(e); + } + + return null; + } + + /** + * Deletes multiple indexes + * + * @param {String[]} indexes - Index names + * + * @returns {Promise.} + */ + async deleteIndexes(indexes: string[] = []) { + if (indexes.length === 0) { + return Bluebird.resolve([]); + } + const deleted = new Set(); + + try { + const body = await this._client.cat.aliases({ format: "json" }); + + const esRequest = body.reduce( + 
(request, { alias, index: indice }) => { + const index = this._extractIndex(alias); + + if ( + alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || + !indexes.includes(index) + ) { + return request; + } + + deleted.add(index); + request.index.push(indice); + + return request; + }, + { index: [] } + ); + + if (esRequest.index.length === 0) { + return []; + } + + debug("Delete indexes: %o", esRequest); + + await this._client.indices.delete(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + return Array.from(deleted); + } + + /** + * Deletes an index + * + * @param {String} index - Index name + * + * @returns {Promise} + */ + async deleteIndex(index: string): Promise { + await this.deleteIndexes([index]); + + return null; + } + + /** + * Forces a refresh on the collection. + * + * /!\ Can lead to some performance issues. + * cf https://www.elastic.co/guide/en/elasticsearch/guide/current/near-real-time.html for more details + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} { _shards } + */ + async refreshCollection(index: string, collection: string) { + const esRequest: estypes.IndicesRefreshRequest = { + index: this._getAlias(index, collection), + }; + + let body: estypes.IndicesRefreshResponse; + + try { + body = await this._client.indices.refresh(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + + return body; + } + + /** + * Returns true if the document exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {String} id - Document ID + * + * @returns {Promise.} + */ + async exists( + index: string, + collection: string, + id: string + ): Promise { + const esRequest: estypes.ExistsRequest = { + id, + index: this._getAlias(index, collection), + }; + + try { + return await this._client.exists(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Returns the list of documents existing with the ids given in the body param + * NB: Due to internal Kuzzle mechanism, can only be called on a single + * index/collection, using the body { ids: [.. } syntax. 
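+ *
+ * Illustrative usage (assuming `storage` is an instance of this service; ids are hypothetical):
+ *   const { items, errors } = await storage.mExists("nyc-open-data", "yellow-taxi", ["id-1", "id-2"]);
+ *   // items:  ids of the documents that exist (note: unlike mGet, only the ids are returned)
+ *   // errors: ids of the documents that do not exist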
+ * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Document IDs + * + * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} + */ + async mExists(index: string, collection: string, ids: string[]) { + if (ids.length === 0) { + return { errors: [], item: [] }; + } + + const esRequest: estypes.MgetRequest = { + _source: "false", + docs: ids.map((_id) => ({ _id })), + index: this._getAlias(index, collection), + }; + + debug("mExists: %o", esRequest); + + let body: estypes.MgetResponse; + + try { + body = await this._client.mget(esRequest); // NOSONAR + } catch (e) { + throw this._esWrapper.formatESError(e); + } + + const errors = []; + const items = []; + + for (let i = 0; i < body.docs.length; i++) { + const doc = body.docs[i]; + + if (!("error" in doc) && doc.found) { + items.push(doc._id); + } else { + errors.push(doc._id); + } + } + + return { errors, items }; + } + + /** + * Returns true if the index exists + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async hasIndex(index: string): Promise { + const indexes = await this.listIndexes(); + + return indexes.some((idx) => idx === index); + } + + /** + * Returns true if the collection exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * + * @returns {Promise.} + */ + async hasCollection(index: string, collection: string): Promise { + const collections = await this.listCollections(index); + + return collections.some((col: string) => col === collection); + } + + /** + * Returns true if the index has the hidden collection + * + * @param {String} index - Index name + * + * @returns {Promise.} + */ + async _hasHiddenCollection(index) { + const collections = await this.listCollections(index, { + includeHidden: true, + }); + + return collections.some((col) => col === HIDDEN_COLLECTION); + } + + /** + * Creates multiple documents at once. + * If a content has no id, one is automatically generated and assigned to it. + * If a content has a specified identifier, it is rejected if it already exists + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mCreate( + index: string, + collection: string, + documents: JSON[], + { + refresh, + timeout, + userId = null, + }: { + refresh?: boolean | "wait_for"; + timeout?: string; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection), + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, + { rejected, extractedDocuments, documentsToGet } = + this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); + + // prepare the mget request, but only for document having a specified id + const body = + documentsToGet.length > 0 + ? 
await this._client.mget({ + docs: documentsToGet, + index: alias, + }) + : { docs: [] }; + + const existingDocuments = body.docs; + const esRequest: estypes.BulkRequest = { + index: alias, + operations: [], + refresh, + timeout, + }; + const toImport = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0, idx = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + + // Documents are retrieved in the same order than we got them from user + if (typeof document._id === "string" && existingDocuments[idx]) { + const doc = existingDocuments[idx]; + + if (!("error" in doc) && doc.found) { + document._source._kuzzle_info = undefined; + + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document already exists", + status: 400, + }); + } else { + esRequest.operations.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.operations.push(document._source); + + toImport.push(document); + } + idx++; + } else { + esRequest.operations.push({ index: { _index: alias } }); + esRequest.operations.push(document._source); + + toImport.push(document); + } + } + /* end critical code section */ + + return this._mExecute(esRequest, toImport, rejected); + } + + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null), injectKuzzleMeta (false), limits (true) + * + * @returns {Promise.<{ items, errors }> + */ + async mCreateOrReplace( + index: string, + collection: string, + documents: JSONObject[], + { + refresh, + timeout, + userId = null, + injectKuzzleMeta = true, + limits = true, + source = true, + }: KRequestParams = {} + ) { + let kuzzleMeta = {}; + + if (injectKuzzleMeta) { + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }; + } + + const alias = this._getAlias(index, collection); + const esRequest: estypes.BulkRequest = { + index: alias, + operations: [], + refresh, + timeout, + }; + const { rejected, extractedDocuments } = this._extractMDocuments( + documents, + kuzzleMeta + ); + + esRequest.operations = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.operations.push({ + index: { + _id: extractedDocuments[i]._id, + _index: alias, + }, + }); + esRequest.operations.push(extractedDocuments[i]._source); + } + /* end critical code section */ + + return this._mExecute(esRequest, extractedDocuments, rejected, { + limits, + source, + }); + } + + /** + * Updates multiple documents with one request + * Replacements are rejected if targeted documents do not exist + * (like with the normal "update" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), retryOnConflict (0), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mUpdate( + index: string, + collection: string, + documents: JSONObject[], + { + refresh = undefined, + retryOnConflict = 0, + timeout = undefined, + userId = null, + } = {} + ) { + const alias = this._getAlias(index, collection), + toImport 
= [], + esRequest: estypes.BulkRequest = { + index: alias, + operations: [], + refresh, + timeout, + }, + kuzzleMeta = { + _kuzzle_info: { + updatedAt: Date.now(), + updater: getKuid(userId), + }, + }, + { rejected, extractedDocuments } = this._extractMDocuments( + documents, + kuzzleMeta + ); + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const extractedDocument = extractedDocuments[i]; + + if (typeof extractedDocument._id === "string") { + esRequest.operations.push({ + update: { + _id: extractedDocument._id, + _index: alias, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }); + + // _source: true => makes ES return the updated document source in the + // response. Required by the real-time notifier component + esRequest.operations.push({ + _source: true, + doc: extractedDocument._source, + }); + toImport.push(extractedDocument); + } else { + extractedDocument._source._kuzzle_info = undefined; + + rejected.push({ + document: { + _id: extractedDocument._id, + body: extractedDocument._source, + }, + reason: "document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + + const response = await this._mExecute(esRequest, toImport, rejected); + + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + status: item.status, + })); + + return response; + } + + /** + * Creates or replaces multiple documents at once. + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - refresh (undefined), retryOnConflict (0), timeout (undefined), userId (null) + * + * @returns {Promise.<{ items, errors }> + */ + async mUpsert( + index: string, + collection: string, + documents: JSONObject[], + { + refresh, + retryOnConflict = 0, + timeout, + userId = null, + }: { + refresh?: boolean | "wait_for"; + retryOnConflict?: number; + timeout?: string; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection); + const esRequest: estypes.BulkRequest = { + operations: [], + refresh, + timeout, + }; + + const user = getKuid(userId); + const now = Date.now(); + const kuzzleMeta = { + doc: { + _kuzzle_info: { + updatedAt: now, + updater: user, + }, + }, + upsert: { + _kuzzle_info: { + author: user, + createdAt: now, + }, + }, + }; + + const { rejected, extractedDocuments } = this._extractMDocuments( + documents, + kuzzleMeta, + { + prepareMUpsert: true, + requireId: true, + } + ); + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + esRequest.operations.push( + { + update: { + _id: extractedDocuments[i]._id, + _index: alias, + _source: true, + retry_on_conflict: + retryOnConflict || this._config.defaults.onUpdateConflictRetries, + }, + }, + { + doc: extractedDocuments[i]._source.changes, + upsert: extractedDocuments[i]._source.default, + } + ); + // _source: true + // Makes ES return the updated document source in the response. 
+ // Required by the real-time notifier component + } + /* end critical code section */ + + const response = await this._mExecute( + esRequest, + extractedDocuments, + rejected + ); + + // with _source: true, ES returns the updated document in + // response.result.get._source + // => we replace response.result._source with it so that the notifier + // module can seamlessly process all kind of m* response* + response.items = response.items.map((item) => ({ + _id: item._id, + _source: item.get._source, + _version: item._version, + created: item.result === "created", // Needed by the notifier + status: item.status, + })); + + return response; + } + + /** + * Replaces multiple documents at once. + * Replacements are rejected if targeted documents do not exist + * (like with the normal "replace" method) + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Object[]} documents - Documents + * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) + * + * @returns {Promise.} { items, errors } + */ + async mReplace( + index: string, + collection: string, + documents: JSONObject[], + { + refresh, + timeout, + userId = null, + }: { + refresh?: boolean | "wait_for"; + timeout?: string; + userId?: string; + } = {} + ) { + const alias = this._getAlias(index, collection), + kuzzleMeta = { + _kuzzle_info: { + author: getKuid(userId), + createdAt: Date.now(), + updatedAt: null, + updater: null, + }, + }, + { rejected, extractedDocuments, documentsToGet } = + this._extractMDocuments(documents, kuzzleMeta, { + prepareMGet: true, + requireId: true, + }); + + if (documentsToGet.length < 1) { + return { errors: rejected, items: [] }; + } + + const body = await this._client.mget({ + docs: documentsToGet, + index: alias, + }); + + const existingDocuments = body.docs; + const esRequest: estypes.BulkRequest = { + operations: [], + refresh, + timeout, + }; + const toImport = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < extractedDocuments.length; i++) { + const document = extractedDocuments[i]; + + // Documents are retrieved in the same order than we got them from user + const doc = existingDocuments[i]; + + if (!("error" in doc) && doc?.found) { + esRequest.operations.push({ + index: { + _id: document._id, + _index: alias, + }, + }); + esRequest.operations.push(document._source); + + toImport.push(document); + } else { + document._source._kuzzle_info = undefined; + + rejected.push({ + document: { + _id: document._id, + body: document._source, + }, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + + return this._mExecute(esRequest, toImport, rejected); + } + + /** + * Deletes multiple documents with one request + * + * @param {String} index - Index name + * @param {String} collection - Collection name + * @param {Array.} ids - Documents IDs + * @param {Object} options - timeout (undefined), refresh (undefined) + * + * @returns {Promise.<{ documents, errors }> + */ + async mDelete( + index: string, + collection: string, + ids: string[], + { + refresh, + }: { + refresh?: boolean | "wait_for"; + timeout?: number; + } = {} + ) { + const query = { ids: { values: [] } }; + const validIds = []; + const partialErrors = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < ids.length; i++) { + const _id = ids[i]; + + if (typeof _id === "string") { + 
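+        // string ids are kept for deletion; anything else is reported as a
+        // 400 error through partialErrors below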
validIds.push(_id); + } else { + partialErrors.push({ + _id, + reason: "document _id must be a string", + status: 400, + }); + } + } + /* end critical code section */ + await this.refreshCollection(index, collection); + + const { items } = await this.mGet(index, collection, validIds); + + let idx = 0; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < validIds.length; i++) { + const validId = validIds[i]; + const item = items[idx]; + + if (item && item._id === validId) { + query.ids.values.push(validId); + idx++; + } else { + partialErrors.push({ + _id: validId, + reason: "document not found", + status: 404, + }); + } + } + /* end critical code section */ + + // @todo duplicated query to get documents body, mGet here and search in + // deleteByQuery + const { documents } = await this.deleteByQuery(index, collection, query, { + refresh, + }); + + return { documents, errors: partialErrors }; + } + + /** + * Executes an ES request prepared by mcreate, mupdate, mreplace, mdelete or mwriteDocuments + * Returns a standardized ES response object, containing the list of + * successfully performed operations, and the rejected ones + * + * @param {Object} esRequest - Elasticsearch request + * @param {Object[]} documents - Document sources (format: {_id, _source}) + * @param {Object[]} partialErrors - pre-rejected documents + * @param {Object} options - limits (true) + * + * @returns {Promise.} results + */ + async _mExecute( + esRequest: estypes.BulkRequest, + documents: JSONObject[], + partialErrors: JSONObject[] = [], + { limits = true, source = true } = {} + ) { + assertWellFormedRefresh(esRequest); + + if (this._hasExceededLimit(limits, documents)) { + return kerror.reject("services", "storage", "write_limit_exceeded"); + } + + let body = { items: [] }; + + if (documents.length > 0) { + try { + body = await this._client.bulk(esRequest); + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + const successes = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < body.items.length; i++) { + const item = body.items[i]; + const result = item[Object.keys(item)[0]]; + + if (result.status >= 400) { + if (result.status === 404) { + partialErrors.push({ + document: { + _id: documents[i]._id, + body: documents[i]._source, + }, + reason: "document not found", + status: result.status, + }); + } else { + partialErrors.push({ + document: documents[i], + reason: result.error.reason, + status: result.status, + }); + } + } else { + successes.push({ + _id: result._id, + _source: source ? 
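+            // the "source" option (true by default) lets callers skip echoing
+            // the document body back in the results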
documents[i]._source : undefined, + _version: result._version, + created: result.result === "created", + get: result.get, + result: result.result, + status: result.status, // used by mUpdate to get the full document body + }); + } + } + /* end critical code section */ + + return { + errors: partialErrors, // @todo rename items to documents + items: successes, + }; + } + + /** + * Extracts, injects metadata and validates documents contained + * in a Request + * + * Used by mCreate, mUpdate, mUpsert, mReplace and mCreateOrReplace + * + * @param {Object[]} documents - Documents + * @param {Object} metadata - Kuzzle metadata + * @param {Object} options - prepareMGet (false), requireId (false) + * + * @returns {Object} { rejected, extractedDocuments, documentsToGet } + */ + _extractMDocuments( + documents: JSONObject[], + metadata: JSONObject, + { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} + ) { + const rejected = []; + const extractedDocuments = []; + const documentsToGet = []; + + /** + * @warning Critical code section + * + * request can contain more than 10K elements + */ + for (let i = 0; i < documents.length; i++) { + const document = documents[i]; + + if (!isPlainObject(document.body) && !prepareMUpsert) { + rejected.push({ + document, + reason: "document body must be an object", + status: 400, + }); + } else if (!isPlainObject(document.changes) && prepareMUpsert) { + rejected.push({ + document, + reason: "document changes must be an object", + status: 400, + }); + } else if ( + prepareMUpsert && + document.default && + !isPlainObject(document.default) + ) { + rejected.push({ + document, + reason: "document default must be an object", + status: 400, + }); + } else if (requireId && typeof document._id !== "string") { + rejected.push({ + document, + reason: "document _id must be a string", + status: 400, + }); + } else { + this._processExtract( + prepareMUpsert, + prepareMGet, + metadata, + document, + extractedDocuments, + documentsToGet + ); + } + } + /* end critical code section */ + + return { documentsToGet, extractedDocuments, rejected }; + } + + private _hasExceededLimit(limits: boolean, documents: JSONObject[]) { + return ( + limits && + documents.length > global.kuzzle.config.limits.documentsWriteCount + ); + } + + private _processExtract( + prepareMUpsert: boolean, + prepareMGet: boolean, + metadata: JSONObject, + document: JSONObject, + extractedDocuments: JSONObject[], + documentsToGet: JSONObject[] + ) { + let extractedDocument; + + if (prepareMUpsert) { + extractedDocument = { + _source: { + // Do not use destructuring, it's 10x slower + changes: Object.assign({}, metadata.doc, document.changes), + default: Object.assign( + {}, + metadata.upsert, + document.changes, + document.default + ), + }, + }; + } else { + extractedDocument = { + // Do not use destructuring, it's 10x slower + _source: Object.assign({}, metadata, document.body), + }; + } + + if (document._id) { + extractedDocument._id = document._id; + } + + extractedDocuments.push(extractedDocument); + + if (prepareMGet && typeof document._id === "string") { + documentsToGet.push({ + _id: document._id, + _source: false, + }); + } + } + + /** + * Throws an error if the provided mapping is invalid + * + * @param {Object} mapping + * @throws + */ + _checkMappings(mapping: JSONObject, path = [], check = true) { + const properties = Object.keys(mapping); + const mappingProperties = + path.length === 0 + ? 
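+        // at the mapping root only the root-level keywords are allowed;
+        // nested levels may also use child keywords such as "type"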
ROOT_MAPPING_PROPERTIES + : [...ROOT_MAPPING_PROPERTIES, ...CHILD_MAPPING_PROPERTIES]; + + for (const property of properties) { + if (check && !mappingProperties.includes(property)) { + const currentPath = [...path, property].join("."); + + throw kerror.get( + "services", + "storage", + "invalid_mapping", + currentPath, + didYouMean(property, mappingProperties) + ); + } + + if (property === "properties") { + // type definition level, we don't check + this._checkMappings(mapping[property], [...path, "properties"], false); + } else if (mapping[property]?.properties) { + // root properties level, check for "properties", "dynamic" and "_meta" + this._checkMappings(mapping[property], [...path, property], true); + } + } + } + + /** + * Given index + collection, returns the associated alias name. + * Prefer this function to `_getIndice` and `_getAvailableIndice` whenever it is possible. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Alias name (eg: '@&nepali.liia') + */ + _getAlias(index, collection) { + return `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + } + + /** + * Given an alias name, returns the associated index name. + */ + async _checkIfAliasExists(aliasName) { + return this._client.indices.existsAlias({ + name: aliasName, + }); + } + + /** + * Given index + collection, returns the associated indice name. + * Use this function if ES does not accept aliases in the request. Otherwise use `_getAlias`. + * + * @param {String} index + * @param {String} collection + * + * @returns {String} Indice name (eg: '&nepali.liia') + * @throws If there is not exactly one indice associated + */ + async _getIndice(index: string, collection: string): Promise { + const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; + const body = await this._client.cat.aliases({ + format: "json", + name: alias, + }); + + if (body.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } else if (body.length > 1) { + throw kerror.get( + "services", + "storage", + "multiple_indice_alias", + `"alias" starting with "${ALIAS_PREFIX}"`, + '"indices"' + ); + } + + return body[0].index; + } + + /** + * Given an ES Request returns the settings of the corresponding indice. + * + * @param esRequest the ES Request with wanted settings. + * @return {Promise<*>} the settings of the indice. + * @private + */ + async _getSettings( + esRequest: estypes.IndicesGetSettingsRequest + ): Promise { + const response = await this._client.indices.getSettings(esRequest); + const index = esRequest.index as string; + + return response[index].settings; + } + + /** + * Given index + collection, returns an available indice name. + * Use this function when creating the associated indice. Otherwise use `_getAlias`. 
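+   * If the candidate indice name is already taken, a random numeric suffix is
+   * appended (e.g. '&nepali.liia' could become '&nepali.liia.42'), truncating
+   * the base name when needed so the result stays within 255 bytes.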
+ * + * @param {String} index + * @param {String} collection + * + * @returns {String} Available indice name (eg: '&nepali.liia2') + */ + async _getAvailableIndice( + index: string, + collection: string + ): Promise { + let indice = this._getAlias(index, collection).substring( + INDEX_PREFIX_POSITION_IN_ALIAS + ); + + if (!(await this._client.indices.exists({ index: indice }))) { + return indice; + } + + let notAvailable; + let suffix; + do { + suffix = `.${randomNumber(100000)}`; + + const overflow = Buffer.from(indice + suffix).length - 255; + if (overflow > 0) { + const indiceBuffer = Buffer.from(indice); + indice = indiceBuffer + .subarray(0, indiceBuffer.length - overflow) + .toString(); + } + + notAvailable = await this._client.indices.exists({ + index: indice + suffix, + }); + } while (notAvailable); + + return indice + suffix; + } + + /** + * Given an indice, returns the associated alias name. + * + * @param {String} indice + * + * @returns {String} Alias name (eg: '@&nepali.liia') + * @throws If there is not exactly one alias associated that is prefixed with @ + */ + async _getAliasFromIndice(indice) { + const body = await this._client.indices.getAlias({ index: indice }); + const aliases = Object.keys(body[indice].aliases).filter((alias) => + alias.startsWith(ALIAS_PREFIX) + ); + + if (aliases.length < 1) { + throw kerror.get("services", "storage", "unknown_index_collection"); + } + + return aliases; + } + + /** + * Check for each indice whether it has an alias or not. + * When the latter is missing, create one based on the indice name. + * + * This check avoids a breaking change for those who were using Kuzzle before + * alias attribution for each indice turned into a standard (appear in 2.14.0). + */ + async generateMissingAliases() { + try { + const body = await this._client.cat.indices({ format: "json" }); + const indices = body.map(({ index: indice }) => indice); + const aliases = await this.listAliases(); + + const indicesWithoutAlias = indices.filter( + (indice) => + indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && + !aliases.some((alias) => alias.indice === indice) + ); + + const esRequest = { body: { actions: [] } }; + for (const indice of indicesWithoutAlias) { + esRequest.body.actions.push({ + add: { alias: `${ALIAS_PREFIX}${indice}`, index: indice }, + }); + } + + if (esRequest.body.actions.length > 0) { + await this._client.indices.updateAliases(esRequest); + } + } catch (error) { + throw this._esWrapper.formatESError(error); + } + } + + /** + * Throws if index or collection includes forbidden characters + * + * @param {String} index + * @param {String} collection + */ + _assertValidIndexAndCollection(index, collection = null) { + if (!this.isIndexNameValid(index)) { + throw kerror.get("services", "storage", "invalid_index_name", index); + } + + if (collection !== null && !this.isCollectionNameValid(collection)) { + throw kerror.get( + "services", + "storage", + "invalid_collection_name", + collection + ); + } + } + + /** + * Given an alias, extract the associated index. + * + * @param {String} alias + * + * @returns {String} Index name + */ + _extractIndex(alias) { + return alias.substr( + INDEX_PREFIX_POSITION_IN_ALIAS + 1, + alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1 + ); + } + + /** + * Given an alias, extract the associated collection. 
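+   * e.g. the alias '@&nepali.liia' yields the collection 'liia'.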
+   *
+   * @param {String} alias
+   *
+   * @returns {String} Collection name
+   */
+  _extractCollection(alias) {
+    const separatorPos = alias.indexOf(NAME_SEPARATOR);
+
+    return alias.substr(separatorPos + 1, alias.length);
+  }
+
+  /**
+   * Given aliases, extract indexes and collections.
+   *
+   * @param {Array.<String>} aliases
+   * @param {Boolean} includeHidden Only refers to `HIDDEN_COLLECTION` occurrences. An empty index will still be listed. Defaults to `false`.
+   *
+   * @returns {Object.<String, String[]>} Indexes as keys and an array of their collections as values
+   */
+  _extractSchema(aliases, { includeHidden = false } = {}) {
+    const schema = {};
+
+    for (const alias of aliases) {
+      const [indexName, collectionName] = alias
+        .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length)
+        .split(NAME_SEPARATOR);
+
+      if (
+        alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix &&
+        (collectionName !== HIDDEN_COLLECTION || includeHidden)
+      ) {
+        if (!schema[indexName]) {
+          schema[indexName] = [];
+        }
+
+        if (!schema[indexName].includes(collectionName)) {
+          schema[indexName].push(collectionName);
+        }
+      }
+    }
+
+    return schema;
+  }
+
+  /**
+   * Creates the hidden collection on the provided index if it does not
+   * already exist
+   *
+   * @param {String} index Index name
+   */
+  async _createHiddenCollection(index) {
+    const mutex = new Mutex(`hiddenCollection/${index}`);
+
+    try {
+      await mutex.lock();
+
+      if (await this._hasHiddenCollection(index)) {
+        return;
+      }
+
+      const esRequest: estypes.IndicesCreateRequest = {
+        aliases: {
+          [this._getAlias(index, HIDDEN_COLLECTION)]: {},
+        },
+        index: await this._getAvailableIndice(index, HIDDEN_COLLECTION),
+        settings: {
+          number_of_replicas: this._config.defaultSettings.number_of_replicas,
+          number_of_shards: this._config.defaultSettings.number_of_shards,
+        },
+        wait_for_active_shards: await this._getWaitForActiveShards(),
+      };
+
+      await this._client.indices.create(esRequest);
+    } catch (e) {
+      throw this._esWrapper.formatESError(e);
+    } finally {
+      await mutex.unlock();
+    }
+  }
+
+  /**
+   * We always need to wait for a minimal number of shards to be available
+   * before answering the client. This avoids the Elasticsearch node
+   * returning a 404 Not Found error when the client tries to index a
+   * document in the index.
+   * To find the best value for this setting, we need to take into account
+   * the number of nodes in the cluster and the number of shards per index.
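+   * The implementation below simply waits for "all" active shards on
+   * multi-node clusters, and for a single shard on single-node setups.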
+ */ + async _getWaitForActiveShards(): Promise { + const body = await this._client.cat.nodes({ format: "json" }); + + const numberOfNodes = body.length; + + if (numberOfNodes > 1) { + return "all"; + } + + return 1; + } + + /** + * Scroll indice in elasticsearch and return all document that match the filter + * /!\ throws a write_limit_exceed error: this method is intended to be used + * by deleteByQuery and updateByQuery + * + * @param {Object} esRequest - Search request body + * + * @returns {Promise.} resolve to an array of documents + */ + async _getAllDocumentsFromQuery(esRequest: estypes.SearchRequest) { + let { hits, _scroll_id } = await this._client.search(esRequest); + const totalHitsValue = this._getHitsTotalValue(hits); + + if (totalHitsValue > global.kuzzle.config.limits.documentsWriteCount) { + throw kerror.get("services", "storage", "write_limit_exceeded"); + } + + let documents = hits.hits.map((h: JSONObject) => ({ + _id: h._id, + _source: h._source, + body: {}, + })); + + while (totalHitsValue !== documents.length) { + ({ hits, _scroll_id } = await this._client.scroll({ + scroll: esRequest.scroll, + scroll_id: _scroll_id, + })); + + documents = documents.concat( + hits.hits.map((h: JSONObject) => ({ + _id: h._id, + _source: h._source, + body: {}, + })) + ); + } + + await this.clearScroll(_scroll_id); + + return documents; + } + + /** + * Clean and normalize the searchBody + * Ensure only allowed parameters are passed to ES + * + * @param {Object} searchBody - ES search body (with query, aggregations, sort, etc) + */ + _sanitizeSearchBody(searchBody) { + // Only allow a whitelist of top level properties + for (const key of Object.keys(searchBody)) { + if (searchBody[key] !== undefined && !this.searchBodyKeys.includes(key)) { + throw kerror.get("services", "storage", "invalid_search_query", key); + } + } + + // Ensure that the body does not include a script + this._scriptCheck(searchBody); + + // Avoid empty queries that causes ES to respond with an error. + // Empty queries are turned into match_all queries + if (_.isEmpty(searchBody.query)) { + searchBody.query = { match_all: {} }; + } + + return searchBody; + } + + /** + * Throw if a script is used in the query. + * + * Only Stored Scripts are accepted + * + * @param {Object} object + */ + _scriptCheck(object) { + for (const [key, value] of Object.entries(object)) { + if (this.scriptKeys.includes(key)) { + for (const scriptArg of Object.keys(value)) { + if (!this.scriptAllowedArgs.includes(scriptArg)) { + throw kerror.get( + "services", + "storage", + "invalid_query_keyword", + `${key}.${scriptArg}` + ); + } + } + } + // Every object must be checked here, even the ones nested into an array + else if (typeof value === "object" && value !== null) { + this._scriptCheck(value); + } + } + } + + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isCollectionNameValid(name) { + return _isObjectNameValid(name); + } + + /** + * Checks if a collection name is valid + * @param {string} name + * @returns {Boolean} + */ + isIndexNameValid(name) { + return _isObjectNameValid(name); + } + + /** + * Clears an allocated scroll + * @param {[type]} id [description] + * @returns {[type]} [description] + */ + async clearScroll(id?: string) { + if (id) { + debug("clearing scroll: %s", id); + await this._client.clearScroll({ scroll_id: id }); + } + } + + /** + * Loads a configuration value from services.storageEngine and assert a valid + * ms format. 
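+   * e.g. a configured value of "10s" is returned as 10000 milliseconds.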
+ * + * @param {String} key - relative path to the key in configuration + * + * @returns {Number} milliseconds + */ + _loadMsConfig(key) { + const configValue = _.get(this._config, key); + + assert( + typeof configValue === "string", + `services.storageEngine.${key} must be a string.` + ); + + const parsedValue = ms(configValue); + + assert( + typeof parsedValue === "number", + `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` + ); + + return parsedValue; + } + + /** + * Returns true if one of the mappings dynamic property changes value from + * false to true + */ + _dynamicChanges(previousMappings, newMappings) { + const previousValues = findDynamic(previousMappings); + + for (const [path, previousValue] of Object.entries(previousValues)) { + if (previousValue.toString() !== "false") { + continue; + } + + const newValue = _.get(newMappings, path); + + if (newValue && newValue.toString() !== "false") { + return true; + } + } + + return false; + } + + async waitForElasticsearch() { + if (esState !== esStateEnum.NONE) { + while (esState !== esStateEnum.OK) { + await Bluebird.delay(1000); + } + + return; + } + + esState = esStateEnum.AWAITING; + + global.kuzzle.log.info("[ℹ] Trying to connect to Elasticsearch..."); + + while (esState !== esStateEnum.OK) { + try { + // Wait for at least 1 shard to be initialized + const health = await this._client.cluster.health({ + wait_for_no_initializing_shards: true, + }); + + if (health.number_of_pending_tasks === 0) { + global.kuzzle.log.info("[✔] Elasticsearch is ready"); + esState = esStateEnum.OK; + } else { + global.kuzzle.log.info( + `[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining` + ); + await Bluebird.delay(1000); + } + } catch (e) { + await Bluebird.delay(1000); + } + } + } + + /** + * Checks if the dynamic properties are correct + */ + _checkDynamicProperty(mappings) { + const dynamicProperties = findDynamic(mappings); + for (const [path, value] of Object.entries(dynamicProperties)) { + // Prevent common mistake + if (typeof value === "boolean") { + _.set(mappings, path, value.toString()); + } else if (typeof value !== "string") { + throw kerror.get( + "services", + "storage", + "invalid_mapping", + path, + "Dynamic property value should be a string." + ); + } + + if (!DYNAMIC_PROPERTY_VALUES.includes(value.toString())) { + throw kerror.get( + "services", + "storage", + "invalid_mapping", + path, + `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join( + '", "' + )}"` + ); + } + } + } + + _setLastActionToKuzzleMeta( + esRequest: JSONObject, + alias: string, + kuzzleMeta: JSONObject + ) { + /** + * @warning Critical code section + * + * bulk body can contain more than 10K elements + */ + let lastAction = ""; + const actionNames = ["index", "create", "update", "delete"]; + + for (let i = 0; i < esRequest.operations.length; i++) { + const item = esRequest.operations[i]; + const action = Object.keys(item)[0]; + + if (actionNames.indexOf(action) !== -1) { + lastAction = action; + + item[action]._index = alias; + + if (item[action]?._type) { + item[action]._type = undefined; + } + } else if (lastAction === "index" || lastAction === "create") { + item._kuzzle_info = kuzzleMeta.created; + } else if (lastAction === "update") { + this._setLastActionToKuzzleMetaUpdate(item, kuzzleMeta); + } + } + /* end critical code section */ + } + + _setLastActionToKuzzleMetaUpdate(item: JSONObject, kuzzleMeta: JSONObject) { + for (const prop of ["doc", "upsert"]) { + if (isPlainObject(item[prop])) { + item[prop]._kuzzle_info = kuzzleMeta.updated; + } + } + } + + _getHitsTotalValue(hits: estypes.SearchHitsMetadata): number { + if (typeof hits.total === "number") { + return hits.total; + } + + return hits.total.value; + } +} + +/** + * Finds paths and values of mappings dynamic properties + * + * @example + * + * findDynamic(mappings); + * { + * "properties.metadata.dynamic": "true", + * "properties.user.properties.address.dynamic": "strict" + * } + */ +function findDynamic(mappings, path = [], results = {}) { + if (mappings.dynamic !== undefined) { + results[path.concat("dynamic").join(".")] = mappings.dynamic; + } + + for (const [key, value] of Object.entries(mappings)) { + if (isPlainObject(value)) { + findDynamic(value, path.concat(key), results); + } + } + + return results; +} + +/** + * Forbids the use of the _routing ES option + * + * @param {Object} esRequest + * @throws + */ +function assertNoRouting(esRequest) { + if (esRequest._routing) { + throw kerror.get("services", "storage", "no_routing"); + } +} + +/** + * Checks if the optional "refresh" argument is well-formed + * + * @param {Object} esRequest + * @throws + */ +function assertWellFormedRefresh(esRequest) { + if (!["wait_for", "false", false, undefined].includes(esRequest.refresh)) { + throw kerror.get( + "services", + "storage", + "invalid_argument", + "refresh", + '"wait_for", false' + ); + } +} + +function getKuid(userId: string): string | null { + if (!userId) { + return null; + } + + return String(userId); +} + +/** + * Checks if an index or collection name is valid + * + * @see https://www.elastic.co/guide/en/elasticsearch/reference/7.4/indices-create-index.html + * + * Beware of the length check: ES allows indice names up to 255 bytes, but since + * in Kuzzle we emulate collections as indices, we have to make sure + * that the privacy prefix, the index name, the separator and the collection + * name ALL fit within the 255-bytes limit of Elasticsearch. 
The simplest way + * is to limit index and collection names to 126 bytes and document that + * limitation (prefix(1) + index(1..126) + sep(1) + collection(1..126) = 4..254) + * + * @param {string} name + * @returns {Boolean} + */ +function _isObjectNameValid(name: string): boolean { + if (typeof name !== "string" || name.length === 0) { + return false; + } + + if (name.toLowerCase() !== name) { + return false; + } + + if (Buffer.from(name).length > 126) { + return false; + } + + if (name === "_all") { + return false; + } + + let valid = true; + + for (let i = 0; valid && i < FORBIDDEN_CHARS.length; i++) { + valid = !name.includes(FORBIDDEN_CHARS[i]); + } + + return valid; +} diff --git a/lib/service/storage/esWrapper.js b/lib/service/storage/8/esWrapper.js similarity index 97% rename from lib/service/storage/esWrapper.js rename to lib/service/storage/8/esWrapper.js index 8b8b760cf8..5e13eca6b2 100644 --- a/lib/service/storage/esWrapper.js +++ b/lib/service/storage/8/esWrapper.js @@ -25,11 +25,13 @@ const Bluebird = require("bluebird"); const _ = require("lodash"); -const es = require("@elastic/elasticsearch"); +const es = require("sdk-es8"); -const { KuzzleError } = require("../../kerror/errors"); -const debug = require("../../util/debug")("kuzzle:services:storage:ESCommon"); -const kerror = require("../../kerror").wrap("services", "storage"); +const { KuzzleError } = require("../../../kerror/errors"); +const debug = require("../../../util/debug")( + "kuzzle:services:storage:ESCommon", +); +const kerror = require("../../../kerror").wrap("services", "storage"); const errorMessagesMapping = [ { diff --git a/lib/service/storage/queryTranslator.js b/lib/service/storage/commons/queryTranslator.js similarity index 99% rename from lib/service/storage/queryTranslator.js rename to lib/service/storage/commons/queryTranslator.js index b76b08ed8f..0d296949ca 100644 --- a/lib/service/storage/queryTranslator.js +++ b/lib/service/storage/commons/queryTranslator.js @@ -21,7 +21,7 @@ "use strict"; -const kerror = require("../../kerror"); +const kerror = require("../../../kerror"); class KeywordError extends Error { constructor(type, name) { diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 144e2ec502..9ff4b02a5b 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -1,3857 +1,55 @@ -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ +import { Client as ClientES7 } from "sdk-es7"; +import { Client as ClientES8 } from "sdk-es8"; -import _ from "lodash"; +import { ES7 } from "./7/elasticsearch"; +import { ES8 } from "./8/elasticsearch"; -import { - Client as StorageClient, - ClientOptions, - estypes, -} from "@elastic/elasticsearch"; -import { - InfoResult, - JSONObject, - KImportError, - KRequestBody, - KRequestParams, - KStats, - KStatsIndexes, - KUpdateResponse, -} from "../../types/storage/Elasticsearch"; - -import assert from "assert"; - -import ms from "ms"; -import Bluebird from "bluebird"; -import semver from "semver"; -import debug from "../../util/debug"; - -import ESWrapper from "./esWrapper"; -import QueryTranslator from "./queryTranslator"; -import didYouMean from "../../util/didYouMean"; import Service from "../service"; -import * as kerror from "../../kerror"; -import { assertIsObject } from "../../util/requestAssertions"; -import { isPlainObject } from "../../util/safeObject"; import scopeEnum from "../../core/storage/storeScopeEnum"; -import extractFields from "../../util/extractFields"; -import { Mutex } from "../../util/mutex"; -import { randomNumber } from "../../util/name-generator"; - -debug("kuzzle:services:elasticsearch"); - -const SCROLL_CACHE_PREFIX = "_docscroll_"; - -const ROOT_MAPPING_PROPERTIES = [ - "properties", - "_meta", - "dynamic", - "dynamic_templates", -]; -const CHILD_MAPPING_PROPERTIES = ["type"]; - -// Used for collection emulation -const HIDDEN_COLLECTION = "_kuzzle_keep"; -const ALIAS_PREFIX = "@"; // @todo next major release: Add ALIAS_PREFIX in FORBIDDEN_CHARS -const PRIVATE_PREFIX = "%"; -const PUBLIC_PREFIX = "&"; -const INDEX_PREFIX_POSITION_IN_INDICE = 0; -const INDEX_PREFIX_POSITION_IN_ALIAS = 1; -const NAME_SEPARATOR = "."; -const FORBIDDEN_CHARS = `\\/*?"<>| \t\r\n,+#:${NAME_SEPARATOR}${PUBLIC_PREFIX}${PRIVATE_PREFIX}`; -const DYNAMIC_PROPERTY_VALUES = ["true", "false", "strict"]; - -// used to check whether we need to wait for ES to initialize or not -enum esStateEnum { - AWAITING = 1, - NONE = 2, - OK = 3, -} - -let esState = esStateEnum.NONE; -/** - * @param {Kuzzle} kuzzle kuzzle instance - * @param {Object} config Service configuration - * @param {storeScopeEnum} scope - * @constructor - */ -export default class ElasticSearch extends Service { - public _client: StorageClient; - public _scope: scopeEnum; - public _indexPrefix: string; - public _esWrapper: ESWrapper; - public _esVersion: any; - public _translator: QueryTranslator; - public searchBodyKeys: string[]; - public scriptKeys: string[]; - public scriptAllowedArgs: string[]; - public maxScrollDuration: number; - public scrollTTL: number; - public _config: any; +export class Elasticsearch extends Service { + private _client: any; - static buildClient(config: ClientOptions): StorageClient { - return new StorageClient(config); + get client() { + return this._client; } - constructor(config, scope = scopeEnum.PUBLIC) { + constructor(config: any, scope = scopeEnum.PUBLIC) { super("elasticsearch", config); - this._scope = scope; - this._indexPrefix = - scope === scopeEnum.PRIVATE ? 
PRIVATE_PREFIX : PUBLIC_PREFIX; - - this._client = null; - this._esWrapper = null; - this._esVersion = null; - this._translator = new QueryTranslator(); - - // Allowed root key of a search query - this.searchBodyKeys = [ - "aggregations", - "aggs", - "collapse", - "explain", - "fields", - "from", - "highlight", - "query", - "search_after", - "search_timeout", - "size", - "sort", - "suggest", - "_name", - "_source", - "_source_excludes", - "_source_includes", - ]; - - /** - * Only allow stored-scripts in queries - */ - this.scriptKeys = ["script", "_script"]; - this.scriptAllowedArgs = ["id", "params"]; - - this.maxScrollDuration = this._loadMsConfig("maxScrollDuration"); - - this.scrollTTL = this._loadMsConfig("defaults.scrollTTL"); - } - - get scope() { - return this._scope; - } - - /** - * Initializes the elasticsearch client - * - * @override - * @returns {Promise} - */ - async _initSequence() { - if (this._client) { - return; - } - - if ( - global.NODE_ENV !== "development" && - this._config.commonMapping.dynamic === "true" - ) { - global.kuzzle.log.warn( - [ - "Your dynamic mapping policy is set to 'true' for new fields.", - "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", - 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n") - ); - } - - this._client = ElasticSearch.buildClient(this._config.client); - - await this.waitForElasticsearch(); - - this._esWrapper = new ESWrapper(this._client); - - const { version } = await this._client.info(); - - if (version && !semver.satisfies(semver.coerce(version.number), "^8.0.0")) { - throw kerror.get( - "services", - "storage", - "version_mismatch", - version.number - ); - } - - this._esVersion = version; - } - - /** - * Translate Koncorde filters to Elasticsearch query - * - * @param {Object} filters - Set of valid Koncorde filters - * @returns {Object} Equivalent Elasticsearch query - */ - translateKoncordeFilters(filters) { - return this._translator.translate(filters); - } - - /** - * Returns some basic information about this service - * @override - * - * @returns {Promise.} service informations - */ - async info(): Promise { - const result: InfoResult = { - type: "elasticsearch", - version: this._esVersion, - }; - - try { - const info = await this._client.info(); - result.version = info.version.number; - result.lucene = info.version.lucene_version; - - const health = await this._client.cluster.health(); - result.status = health.status; - - const stats = await this._client.cluster.stats({ human: true }); - result.spaceUsed = stats.indices.store.size; - result.nodes = stats.nodes; - return result; - } catch (error) { - return this._esWrapper.reject(error); - } - } - - /** - * Returns detailed multi-level storage stats data - * - * @returns {Promise.} - */ - async stats(): Promise { - const esRequest: estypes.IndicesStatsRequest = { - metric: ["docs", "store"], - }; - - const stats = await this._client.indices.stats(esRequest); - const indexes: KStatsIndexes = {}; - let size = 0; - - for (const [indice, indiceInfo] of Object.entries(stats.indices)) { - const infos = indiceInfo as any; - // Ignore non-Kuzzle indices - if ( - !indice.startsWith(PRIVATE_PREFIX) && - !indice.startsWith(PUBLIC_PREFIX) - ) { - continue; - } - - const aliases = await this._getAliasFromIndice(indice); - const alias = aliases[0]; - const indexName = this._extractIndex(alias); - const collectionName = 
this._extractCollection(alias); - - if ( - alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || - collectionName === HIDDEN_COLLECTION - ) { - continue; - } - - if (!indexes[indexName]) { - indexes[indexName] = { - collections: [], - name: indexName, - size: 0, - }; - } - - indexes[indexName].collections.push({ - documentCount: infos.total.docs.count, - name: collectionName, - size: infos.total.store.size_in_bytes, - }); - - indexes[indexName].size += infos.total.store.size_in_bytes; - size += infos.total.store.size_in_bytes; - } - - return { - indexes: Object.values(indexes), - size, - }; - } - - /** - * Scrolls results from previous elasticsearch query. - * Automatically clears the scroll context after the last result page has - * been fetched. - * - * @param {String} scrollId - Scroll identifier - * @param {Object} options - scrollTTL (default scrollTTL) - * - * @returns {Promise.<{ scrollId, hits, aggregations, total }>} - */ - async scroll(scrollId: string, { scrollTTL }: { scrollTTL?: string } = {}) { - const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; - const esRequest: estypes.ScrollRequest = { - scroll: _scrollTTL, - scroll_id: scrollId, - }; - - const cacheKey = - SCROLL_CACHE_PREFIX + global.kuzzle.hash(esRequest.scroll_id); - - debug("Scroll: %o", esRequest); - - if (_scrollTTL) { - const scrollDuration = ms(_scrollTTL); - - if (scrollDuration > this.maxScrollDuration) { - throw kerror.get( - "services", - "storage", - "scroll_duration_too_great", - _scrollTTL - ); - } - } - - const stringifiedScrollInfo = await global.kuzzle.ask( - "core:cache:internal:get", - cacheKey - ); - - if (!stringifiedScrollInfo) { - throw kerror.get("services", "storage", "unknown_scroll_id"); - } - - const scrollInfo = JSON.parse(stringifiedScrollInfo); - - try { - const body = await this._client.scroll(esRequest); - const totalHitsValue = this._getHitsTotalValue(body.hits); - - scrollInfo.fetched += body.hits.hits.length; - - if (scrollInfo.fetched >= totalHitsValue) { - debug("Last scroll page fetched: deleting scroll %s", body._scroll_id); - await global.kuzzle.ask("core:cache:internal:del", cacheKey); - await this.clearScroll(body._scroll_id); - } else { - await global.kuzzle.ask( - "core:cache:internal:store", - cacheKey, - JSON.stringify(scrollInfo), - { - ttl: ms(_scrollTTL) || this.scrollTTL, - } + if (config.majorVersion === 7) { + if (scope === scopeEnum.PUBLIC) { + console.warn( + "Elasticsearch 7 is deprecated and will be removed in the next major release." ); + console.warn("Please consider upgrading your Elasticsearch version."); + console.warn("Update your configuration to set 'majorVersion' to 8."); + console.warn("Under the key service.storageEngine.majorVersion"); } - - const remaining = totalHitsValue - scrollInfo.fetched; - - return await this._formatSearchResult(body, remaining, scrollInfo); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Searches documents from elasticsearch with a query - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} searchBody - Search request body (query, sort, etc.) 
- * @param {Object} options - from (undefined), size (undefined), scroll (undefined) - * - * @returns {Promise.<{ scrollId, hits, aggregations, suggest, total }>} - */ - async search( - { - index, - collection, - searchBody, - targets, - }: { - index?: string; - collection?: string; - searchBody?: JSONObject; - targets?: any[]; - } = {}, - { - from, - size, - scroll, - }: { - from?: number; - size?: number; - scroll?: string; - } = {} - ) { - let esIndexes: any; - - if (targets && targets.length > 0) { - const indexes = new Set(); - for (const target of targets) { - for (const targetCollection of target.collections) { - const alias = this._getAlias(target.index, targetCollection); - - indexes.add(alias); - } - } - - esIndexes = Array.from(indexes).join(","); + this._client = new ES7(config, scope); + } else if (config.majorVersion === 8) { + this._client = new ES8(config, scope); } else { - esIndexes = this._getAlias(index, collection); - } - - const esRequest: estypes.SearchRequest = { - ...this._sanitizeSearchBody(searchBody), - from, - index: esIndexes, - scroll, - size, - track_total_hits: true, - }; - - if (scroll) { - const scrollDuration = ms(scroll); - - if (scrollDuration > this.maxScrollDuration) { - throw kerror.get( - "services", - "storage", - "scroll_duration_too_great", - scroll - ); - } - } - - debug("Search: %j", esRequest); - - try { - const body = await this._client.search(esRequest); - const totalHitsValue = this._getHitsTotalValue(body.hits); - let remaining: number; - - if (body._scroll_id) { - const ttl = - (esRequest.scroll && ms(esRequest.scroll)) || - ms(this._config.defaults.scrollTTL); - - await global.kuzzle.ask( - "core:cache:internal:store", - SCROLL_CACHE_PREFIX + global.kuzzle.hash(body._scroll_id), - JSON.stringify({ - collection, - fetched: body.hits.hits.length, - index, - targets, - }), - { ttl } - ); - - remaining = totalHitsValue - body.hits.hits.length; - } - - return await this._formatSearchResult(body, remaining, { - collection, - index, - targets, - }); - } catch (error) { - console.error(error); - throw this._esWrapper.formatESError(error); - } - } - - /** - * Generate a map that associate an alias to a pair of index and collection - * - * @param {*} targets - * @returns - */ - _mapTargetsToAlias(targets) { - const aliasToTargets = {}; - - for (const target of targets) { - for (const targetCollection of target.collections) { - const alias = this._getAlias(target.index, targetCollection); - if (!aliasToTargets[alias]) { - aliasToTargets[alias] = { - collection: targetCollection, - index: target.index, - }; - } - } - } - - return aliasToTargets; - } - - async _formatSearchResult( - body: any, - remaining?: number, - searchInfo: any = {} - ) { - let aliasToTargets = {}; - const aliasCache = new Map(); - - if (searchInfo.targets) { - /** - * We need to map the alias to the target index and collection, - * so we can later retrieve informations about an index & collection - * based on its alias. 
- */ - aliasToTargets = this._mapTargetsToAlias(searchInfo.targets); - } - - const formatHit = async (hit) => { - let index = searchInfo.index; - let collection = searchInfo.collection; - - /** - * If the search has been done on multiple targets, we need to - * retrieve the appropriate index and collection based on the alias - */ - if (hit._index && searchInfo.targets) { - // Caching to reduce call to ES - let aliases = aliasCache.get(hit._index); - if (!aliases) { - // Retrieve all the alias associated to one index - aliases = await this._getAliasFromIndice(hit._index); - aliasCache.set(hit._index, aliases); - } - - /** - * Since multiple alias can point to the same index in ES, we need to - * find the first alias that exists in the map of aliases associated - * to the targets. - */ - const alias = aliases.find((_alias) => aliasToTargets[_alias]); - // Retrieve index and collection information based on the matching alias - index = aliasToTargets[alias].index; - collection = aliasToTargets[alias].collection; - } - - return { - _id: hit._id, - _score: hit._score, - _source: hit._source, - collection, - highlight: hit.highlight, - index, - }; - }; - - async function formatInnerHits(innerHits) { - if (!innerHits) { - return undefined; - } - - const formattedInnerHits = {}; - for (const [name, innerHit] of Object.entries(innerHits)) { - formattedInnerHits[name] = await Bluebird.map( - (innerHit as any).hits.hits, - formatHit - ); - } - return formattedInnerHits; - } - - const hits = await Bluebird.map(body.hits.hits, async (hit) => ({ - inner_hits: await formatInnerHits(hit.inner_hits), - ...(await formatHit(hit)), - })); - - return { - aggregations: body.aggregations, - hits, - remaining, - scrollId: body._scroll_id, - suggest: body.suggest, - total: body.hits.total.value, - }; - } - - /** - * Gets the document with given ID - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document ID - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async get(index, collection, id) { - const esRequest: estypes.GetRequest = { - id, - index: this._getAlias(index, collection), - }; - - // Just in case the user make a GET on url /mainindex/test/_search - // Without this test we return something weird: a result.hits.hits with all - // document without filter because the body is empty in HTTP by default - if (esRequest.id === "_search") { - return kerror.reject("services", "storage", "search_as_an_id"); - } - - debug("Get document: %o", esRequest); - - try { - const body = await this._client.get(esRequest); - - return { - _id: body._id, - _source: body._source, - _version: body._version, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Returns the list of documents matching the ids given in the body param - * NB: Due to internal Kuzzle mechanism, can only be called on a single - * index/collection, using the body { ids: [.. } syntax. 
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Document IDs - * - * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} - */ - async mGet(index: string, collection: string, ids: string[]) { - if (ids.length === 0) { - return { errors: [], item: [] }; - } - - const esRequest: estypes.MgetRequest = { - docs: ids.map((_id) => ({ - _id, - _index: this._getAlias(index, collection), - })), - }; - - debug("Multi-get documents: %o", esRequest); - - let body: estypes.MgetResponse>; - - try { - body = await this._client.mget(esRequest); // NOSONAR - } catch (e) { - throw this._esWrapper.formatESError(e); - } - - const errors = []; - const items = []; - - for (const doc of body.docs) { - if (!("error" in doc) && doc.found) { - items.push({ - _id: doc._id, - _source: doc._source, - _version: doc._version, - }); - } else { - errors.push(doc._id); - } - } - - return { errors, items }; - } - - /** - * Counts how many documents match the filter given in body - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} searchBody - Search request body (query, sort, etc.) - * - * @returns {Promise.} count - */ - async count(index: string, collection: string, searchBody = {}) { - const esRequest: estypes.CountRequest = { - ...this._sanitizeSearchBody(searchBody), - index: this._getAlias(index, collection), - }; - - debug("Count: %o", esRequest); - - try { - const body = await this._client.count(esRequest); - return body.count; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Sends the new document to elasticsearch - * Cleans data to match elasticsearch specifications - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} content - Document content - * @param {Object} options - id (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { _id, _version, _source } - */ - async create( - index: string, - collection: string, - content: JSONObject, - { - id, - refresh, - userId = null, - injectKuzzleMeta = true, - }: { - id?: string; - refresh?: boolean | "wait_for"; - userId?: string; - injectKuzzleMeta?: boolean; - } = {} - ) { - assertIsObject(content); - - const esRequest: estypes.IndexRequest> = { - document: content, - id, - index: this._getAlias(index, collection), - op_type: id ? 
"create" : "index", - refresh, - }; - - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - - // Add metadata - if (injectKuzzleMeta) { - esRequest.document._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }; - } - - debug("Create document: %o", esRequest); - - try { - const body = await this._client.index(esRequest); - - return { - _id: body._id, - _source: esRequest.document, - _version: body._version, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Creates a new document to ElasticSearch, or replace it if it already exist - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Document content - * @param {Object} options - refresh (undefined), userId (null), injectKuzzleMeta (true) - * - * @returns {Promise.} { _id, _version, _source, created } - */ - async createOrReplace( - index, - collection, - id, - content, - { - refresh, - userId = null, - injectKuzzleMeta = true, - }: { - refresh?: boolean | "wait_for"; - userId?: string; - injectKuzzleMeta?: boolean; - } = {} - ) { - const esRequest: estypes.IndexRequest> = { - document: content, - id, - index: this._getAlias(index, collection), - refresh, - }; - - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - - // Add metadata - if (injectKuzzleMeta) { - esRequest.document._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - - debug("Create or replace document: %o", esRequest); - - try { - const body = await this._client.index(esRequest); - - return { - _id: body._id, - _source: esRequest.document, - _version: body._version, - created: body.result === "created", // Needed by the notifier - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Sends the partial document to elasticsearch with the id to update - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Updated content - * @param {Object} options - refresh (undefined), userId (null), retryOnConflict (0) - * - * @returns {Promise.<{ _id, _version }>} - */ - async update( - index: string, - collection: string, - id: string, - content: JSONObject, - { - refresh, - userId = null, - retryOnConflict, - injectKuzzleMeta = true, - }: { - refresh?: boolean | "wait_for"; - userId?: string; - retryOnConflict?: number; - injectKuzzleMeta?: boolean; - } = {} - ): Promise { - const esRequest: estypes.UpdateRequest< - KRequestBody, - KRequestBody - > = { - _source: true, - doc: content, - id, - index: this._getAlias(index, collection), - refresh, - retry_on_conflict: - retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }; - - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - - if (injectKuzzleMeta) { - // Add metadata - esRequest.doc._kuzzle_info = { - ...esRequest.doc._kuzzle_info, - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - - debug("Update document: %o", esRequest); - - try { - const body = await this._client.update(esRequest); - - return { - _id: body._id, - _source: body.get._source, - _version: body._version, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Sends the partial document to elasticsearch with the id to update - * Creates the document 
if it doesn't already exist - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Updated content - * @param {Object} options - defaultValues ({}), refresh (undefined), userId (null), retryOnConflict (0) - * - * @returns {Promise.<{ _id, _version }>} - */ - async upsert( - index: string, - collection: string, - id: string, - content: JSONObject, - { - defaultValues = {}, - refresh, - userId = null, - retryOnConflict, - injectKuzzleMeta = true, - }: { - defaultValues?: JSONObject; - refresh?: boolean | "wait_for"; - userId?: string; - retryOnConflict?: number; - injectKuzzleMeta?: boolean; - } = {} - ) { - const esRequest: estypes.UpdateRequest< - KRequestBody, - KRequestBody - > = { - _source: true, - doc: content, - id, - index: this._getAlias(index, collection), - refresh, - retry_on_conflict: - retryOnConflict || this._config.defaults.onUpdateConflictRetries, - upsert: { ...defaultValues, ...content }, - }; - - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - - // Add metadata - const user = getKuid(userId); - const now = Date.now(); - - if (injectKuzzleMeta) { - esRequest.doc._kuzzle_info = { - ...esRequest.doc._kuzzle_info, - updatedAt: now, - updater: user, - }; - esRequest.upsert._kuzzle_info = { - ...esRequest.upsert._kuzzle_info, - author: user, - createdAt: now, - }; - } - - debug("Upsert document: %o", esRequest); - - try { - const body = await this._client.update(esRequest); - - return { - _id: body._id, - _source: body.get._source, - _version: body._version, - created: body.result === "created", - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Replaces a document to ElasticSearch - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Document content - * @param {Object} options - refresh (undefined), userId (null) - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async replace( - index: string, - collection: string, - id: string, - content: JSONObject, - { - refresh, - userId = null, - injectKuzzleMeta = true, - }: { - refresh?: boolean | "wait_for"; - userId?: string; - injectKuzzleMeta?: boolean; - } = {} - ) { - const alias = this._getAlias(index, collection); - const esRequest: estypes.IndexRequest> = { - document: content, - id, - index: alias, - refresh, - }; - - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - - if (injectKuzzleMeta) { - // Add metadata - esRequest.document._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - - try { - const exists = await this._client.exists({ id, index: alias }); - - if (!exists) { - throw kerror.get( - "services", - "storage", - "not_found", - id, - index, - collection - ); - } - - debug("Replace document: %o", esRequest); - - const body = await this._client.index(esRequest); - - return { - _id: id, - _source: esRequest.document, - _version: body._version, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Sends to elasticsearch the document id to delete - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} options - refresh (undefined) - * - * @returns {Promise} - */ - async delete( - index: string, - collection: string, - 
id: string, - { - refresh, - }: { - refresh?: boolean | "wait_for"; - } = {} - ) { - const esRequest: estypes.DeleteRequest = { - id, - index: this._getAlias(index, collection), - refresh, - }; - - assertWellFormedRefresh(esRequest); - - debug("Delete document: %o", esRequest); - - try { - await this._client.delete(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - return null; - } - - /** - * Deletes all documents matching the provided filters. - * If fetch=false, the max documents write limit is not applied. - * - * Options: - * - size: size of the batch to retrieve documents (no-op if fetch=false) - * - refresh: refresh option for ES - * - fetch: if true, will fetch the documents before delete them - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} options - size (undefined), refresh (undefined), fetch (true) - * - * @returns {Promise.<{ documents, total, deleted, failures: Array<{ id, reason }> }>} - */ - async deleteByQuery( - index: string, - collection: string, - query: JSONObject, - { - refresh, - size = 1000, - fetch = true, - }: { - refresh?: boolean | "wait_for"; - size?: number; - fetch?: boolean; - } = {} - ) { - const esRequest = { - ...this._sanitizeSearchBody({ query }), - index: this._getAlias(index, collection), - scroll: "5s", - } satisfies estypes.DeleteByQueryRequest | estypes.SearchRequest; - - if (!isPlainObject(query)) { - throw kerror.get("services", "storage", "missing_argument", "body.query"); - } - - try { - let documents = []; - - if (fetch) { - documents = await this._getAllDocumentsFromQuery({ - ...esRequest, - size, - }); - } - - debug("Delete by query: %o", esRequest); - - esRequest.refresh = refresh === "wait_for" ? 
true : refresh; - - const request = { - ...esRequest, - max_docs: size, - }; - - if (request.max_docs === -1) { - request.max_docs = undefined; - } - - const body = await this._client.deleteByQuery(request); - - return { - deleted: body.deleted, - documents, - failures: body.failures.map(({ id, cause }) => ({ - id, - reason: cause.reason, - })), - total: body.total, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Delete fields of a document and replace it - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Array} fields - Document fields to be removed - * @param {Object} options - refresh (undefined), userId (null) - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async deleteFields( - index: string, - collection: string, - id: string, - fields: string, - { - refresh, - userId = null, - }: { - refresh?: boolean | "wait_for"; - userId?: string; - } = {} - ) { - const alias = this._getAlias(index, collection); - const esRequest: estypes.GetRequest = { - id, - index: alias, - }; - - try { - debug("DeleteFields document: %o", esRequest); - const body = await this._client.get(esRequest); - - for (const field of fields) { - if (_.has(body._source, field)) { - _.set(body._source, field, undefined); - } - } - - const updatedInfos = { - updatedAt: Date.now(), - updater: getKuid(userId), - }; - - if (typeof body._source._kuzzle_info === "object") { - body._source._kuzzle_info = { - ...body._source._kuzzle_info, - ...updatedInfos, - }; - } else { - body._source._kuzzle_info = updatedInfos; - } - - const newEsRequest: estypes.IndexRequest = { - document: body._source, - id, - index: alias, - refresh, - }; - - assertNoRouting(newEsRequest); - assertWellFormedRefresh(newEsRequest); - - const updated = await this._client.index(newEsRequest); - - return { - _id: id, - _source: body._source, - _version: updated._version, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Updates all documents matching the provided filters - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} changes - Changes wanted on documents - * @param {Object} options - refresh (undefined), size (undefined) - * - * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} - */ - async updateByQuery( - index: string, - collection: string, - query: JSONObject, - changes: JSONObject, - { - refresh, - size = 1000, - userId = null, - }: { - refresh?: boolean | "wait_for"; - size?: number; - userId?: string; - } = {} - ) { - try { - const esRequest: estypes.SearchRequest = { - ...this._sanitizeSearchBody({ query }), - index: this._getAlias(index, collection), - scroll: "5s", - size, - }; - - const documents = await this._getAllDocumentsFromQuery(esRequest); - - for (const document of documents) { - document._source = undefined; - document.body = changes; - } - - debug("Update by query: %o", esRequest); - - const { errors, items } = await this.mUpdate( - index, - collection, - documents, - { refresh, userId } - ); - - return { - errors, - successes: items, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); + throw new Error("Invalid Elasticsearch version."); } } - /** - * Updates all documents matching the provided filters - * - * @param {String} index - Index name - * @param {String} 
collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} changes - Changes wanted on documents - * @param {Object} options - refresh (undefined) - * - * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} - */ - async bulkUpdateByQuery( - index: string, - collection: string, - query: JSONObject, - changes: JSONObject, - { - refresh = false, - }: { - refresh?: boolean; - } = {} - ) { - const script = { - params: {}, - source: "", - }; - - const flatChanges = extractFields(changes, { alsoExtractValues: true }); - - for (const { key, value } of flatChanges) { - script.source += `ctx._source.${key} = params['${key}'];`; - script.params[key] = value; - } - - const esRequest: estypes.UpdateByQueryRequest = { - index: this._getAlias(index, collection), - query: this._sanitizeSearchBody({ query }).query, - refresh, - script, - }; - - debug("Bulk Update by query: %o", esRequest); - - let response: estypes.UpdateByQueryResponse; - - try { - response = await this._client.updateByQuery(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); + static buildClient(config: any, version?: 7 | 8): any { + if (!version) { + version = 7; } - if (response.failures.length) { - const errors = response.failures.map(({ id, cause }) => ({ - cause, - id, - })); - - throw kerror.get( - "services", - "storage", - "incomplete_update", - response.updated, - errors - ); + switch (version) { + case 7: + return new ClientES7(config); + case 8: + return new ClientES8(config); + default: + throw new Error("Invalid Elasticsearch version."); } - - return { - updated: response.updated, - }; } - /** - * Execute the callback with a batch of documents of specified size until all - * documents matched by the query have been processed. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Function} callback - callback that will be called with the "hits" array - * @param {Object} options - size (10), scrollTTL ('5s') - * - * @returns {Promise.} Array of results returned by the callback - */ - async mExecute( - index: string, - collection: string, - query: JSONObject, - callback: any, - { - size = 10, - scrollTTl = "5s", - }: { - size?: number; - scrollTTl?: string; - } = {} - ): Promise { - const esRequest: estypes.SearchRequest = { - ...this._sanitizeSearchBody({ query }), - from: 0, - index: this._getAlias(index, collection), - scroll: scrollTTl, - size, - }; - - if (!isPlainObject(query)) { - throw kerror.get("services", "storage", "missing_argument", "body.query"); - } - - const results = []; - let processed = 0; - let scrollId = null; - - try { - let body = await this._client.search(esRequest); - const totalHitsValue = this._getHitsTotalValue(body.hits); - - while (processed < totalHitsValue && body.hits.hits.length > 0) { - scrollId = body._scroll_id; - results.push(await callback(body.hits.hits)); - processed += body.hits.hits.length; - - body = await this._client.scroll({ - scroll: esRequest.scroll, - scroll_id: scrollId, - }); - } - } finally { - await this.clearScroll(scrollId); - } - - return results; - } - - /** - * Creates a new index. - * - * This methods creates an hidden collection in the provided index to be - * able to list it. - * This methods resolves if the index name does not already exists either as - * private or public index. 
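// The scripted bulk update-by-query above flattens the requested changes into a
// painless script; this is a minimal standalone sketch of the same idea with the
// official v8 client. Node URL, index name and field values are illustrative.
import { Client } from "@elastic/elasticsearch";

const client = new Client({ node: "http://localhost:9200" });

async function scriptedUpdateByQuery(): Promise<void> {
  const changes: Record<string, unknown> = { city: "Kathmandu", age: 26 };

  // Build "ctx._source.<field> = params['<field>'];" for every flattened field.
  let source = "";
  const params: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(changes)) {
    source += `ctx._source.${key} = params['${key}'];`;
    params[key] = value;
  }

  // With the v8 client the response body is returned directly (no `.body` wrapper).
  const response = await client.updateByQuery({
    index: "&nepali.liia", // illustrative indice name
    query: { match_all: {} },
    script: { lang: "painless", source, params },
    refresh: true,
  });

  console.log(response.updated, response.failures);
}

scriptedUpdateByQuery().catch(console.error);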
- * - * @param {String} index - Index name - * - * @returns {Promise} - */ - async createIndex(index: string) { - this._assertValidIndexAndCollection(index); - - let body: estypes.CatAliasesResponse; - - try { - body = await this._client.cat.aliases({ format: "json" }); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - const aliases = body.map(({ alias: name }) => name); - for (const alias of aliases) { - const indexName = this._extractIndex(alias); - - if (index === indexName) { - const indexType = - alias[INDEX_PREFIX_POSITION_IN_ALIAS] === PRIVATE_PREFIX - ? "private" - : "public"; - - throw kerror.get( - "services", - "storage", - "index_already_exists", - indexType, - index - ); - } - } - - await this._createHiddenCollection(index); - - return null; - } - - /** - * Creates an empty collection. - * Mappings and settings will be applied if supplied. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} config - mappings ({}), settings ({}) - * - * @returns {Promise} - */ - async createCollection( - index: string, - collection: string, - { - mappings = {}, - settings = {}, - }: { - mappings?: estypes.MappingTypeMapping; - settings?: Record; - } = {} - ) { - this._assertValidIndexAndCollection(index, collection); - - if (collection === HIDDEN_COLLECTION) { - throw kerror.get( - "services", - "storage", - "collection_reserved", - HIDDEN_COLLECTION - ); - } - - const mutex = new Mutex(`hiddenCollection/create/${index}`); - try { - await mutex.lock(); - - if (await this._hasHiddenCollection(index)) { - await this.deleteCollection(index, HIDDEN_COLLECTION); - } - } catch (error) { - throw this._esWrapper.formatESError(error); - } finally { - await mutex.unlock(); - } - - const esRequest: estypes.IndicesCreateRequest = { - aliases: { - [this._getAlias(index, collection)]: {}, - }, - index: await this._getAvailableIndice(index, collection), - mappings: {}, - settings, - wait_for_active_shards: await this._getWaitForActiveShards(), - }; - - this._checkDynamicProperty(mappings); - - const exists = await this.hasCollection(index, collection); - if (exists) { - return this.updateCollection(index, collection, { mappings, settings }); - } - - this._checkMappings(mappings); - - esRequest.mappings = { - _meta: mappings._meta || this._config.commonMapping._meta, - dynamic: mappings.dynamic || this._config.commonMapping.dynamic, - properties: _.merge( - mappings.properties, - this._config.commonMapping.properties - ), - }; - - esRequest.settings.number_of_replicas = - esRequest.settings.number_of_replicas || - this._config.defaultSettings.number_of_replicas; - - esRequest.settings.number_of_shards = - esRequest.settings.number_of_shards || - this._config.defaultSettings.number_of_shards; - - try { - await this._client.indices.create(esRequest); - } catch (error) { - if ( - _.get(error, "meta.body.error.type") === - "resource_already_exists_exception" - ) { - // race condition: the indice has been created between the "exists" - // check above and this "create" attempt - return null; - } - - throw this._esWrapper.formatESError(error); - } - - return null; - } - - /** - * Retrieves settings definition for index/type - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.<{ settings }>} - */ - async getSettings(index: string, collection: string) { - const indice = await this._getIndice(index, collection); - const esRequest: estypes.IndicesGetSettingsRequest = { - 
index: indice, - }; - - debug("Get settings: %o", esRequest); - - try { - const body = await this._client.indices.getSettings(esRequest); - - return body[indice].settings.index; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Retrieves mapping definition for index/type - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} options - includeKuzzleMeta (false) - * - * @returns {Promise.<{ dynamic, _meta, properties }>} - */ - async getMapping( - index: string, - collection: string, - { - includeKuzzleMeta = false, - }: { - includeKuzzleMeta?: boolean; - } = {} - ) { - const indice = await this._getIndice(index, collection); - const esRequest: estypes.IndicesGetMappingRequest = { - index: indice, - }; - - debug("Get mapping: %o", esRequest); - - try { - const body = await this._client.indices.getMapping(esRequest); - - const properties = includeKuzzleMeta - ? body[indice].mappings.properties - : _.omit(body[indice].mappings.properties, "_kuzzle_info"); - - return { - _meta: body[indice].mappings._meta, - dynamic: body[indice].mappings.dynamic, - properties, - }; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Updates a collection mappings and settings - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} config - mappings ({}), settings ({}) - * - * @returns {Promise} - */ - async updateCollection( - index: string, - collection: string, - { - mappings = {}, - settings = {}, - }: { - mappings?: estypes.MappingTypeMapping; - settings?: Record; - } = {} - ) { - const esRequest: estypes.IndicesGetSettingsRequest = { - index: await this._getIndice(index, collection), - }; - - // If either the putMappings or the putSettings operation fail, we need to - // rollback the whole operation. Since mappings can't be rollback, we try to - // update the settings first, then the mappings and we rollback the settings - // if putMappings fail. - let indexSettings; - - try { - indexSettings = await this._getSettings(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - if (!_.isEmpty(settings)) { - await this.updateSettings(index, collection, settings); - } - - try { - if (!_.isEmpty(mappings)) { - const previousMappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - - await this.updateMapping(index, collection, mappings); - - if (this._dynamicChanges(previousMappings, mappings)) { - await this.updateSearchIndex(index, collection); - } - } - } catch (error) { - const allowedSettings = this.getAllowedIndexSettings(indexSettings); - - // Rollback to previous settings - if (!_.isEmpty(settings)) { - await this.updateSettings(index, collection, allowedSettings); - } - - throw error; - } - - return null; - } - - /** - * Given index settings we return a new version of index settings - * only with allowed settings that can be set (during update or create index). - * @param indexSettings the index settings - * @returns {{index: *}} a new index settings with only allowed settings. 
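// A hedged sketch of reading mappings and settings with the v8 client, as the
// getMapping/getSettings methods above do. Both responses are records keyed by the
// concrete indice name (not the alias), which is why the service resolves the
// indice first. Names below are illustrative.
import { Client } from "@elastic/elasticsearch";

const client = new Client({ node: "http://localhost:9200" });

async function readCollectionDefinition(): Promise<void> {
  const indice = "&nepali.liia"; // illustrative indice name

  const mappingResponse = await client.indices.getMapping({ index: indice });
  const settingsResponse = await client.indices.getSettings({ index: indice });

  const properties = mappingResponse[indice]?.mappings?.properties;
  const indexSettings = settingsResponse[indice]?.settings?.index;

  console.log(properties, indexSettings?.number_of_shards);
}

readCollectionDefinition().catch(console.error);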
- */ - getAllowedIndexSettings(indexSettings) { - return { - index: _.omit(indexSettings.index, [ - "creation_date", - "provided_name", - "uuid", - "version", - ]), - }; - } - - /** - * Sends an empty UpdateByQuery request to update the search index - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @returns {Promise.} {} - */ - async updateSearchIndex(index: string, collection: string) { - const esRequest: estypes.UpdateByQueryRequest = { - // @cluster: conflicts when two nodes start at the same time - conflicts: "proceed", - index: this._getAlias(index, collection), - refresh: true, - // This operation can take some time: this should be an ES - // background task. And it's preferable to a request timeout when - // processing large indexes. - wait_for_completion: false, - }; - - debug("UpdateByQuery: %o", esRequest); - - try { - await this._client.updateByQuery(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Update a collection mappings - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} mappings - Collection mappings in ES format - * - * @returns {Promise.<{ dynamic, _meta, properties }>} - */ - async updateMapping( - index: string, - collection: string, - mappings: estypes.MappingTypeMapping = {} - ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { - let esRequest: estypes.IndicesPutMappingRequest = { - index: this._getAlias(index, collection), - }; - - this._checkDynamicProperty(mappings); - - const collectionMappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - - this._checkMappings(mappings); - - esRequest = { - ...esRequest, - _meta: mappings._meta || collectionMappings._meta, - dynamic: mappings.dynamic || collectionMappings.dynamic, - properties: mappings.properties, - }; - - debug("Update mapping: %o", esRequest); - - try { - await this._client.indices.putMapping(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - const fullProperties = _.merge( - collectionMappings.properties, - mappings.properties - ); - - return { - _meta: esRequest._meta, - dynamic: esRequest.dynamic.toString(), - properties: fullProperties, - }; - } - - /** - * Updates a collection settings (eg: analyzers) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} settings - Collection settings in ES format - * - * @returns {Promise} - */ - async updateSettings(index, collection, settings = {}) { - const esRequest = { - index: this._getAlias(index, collection), - }; - - await this._client.indices.close(esRequest); - - try { - await this._client.indices.putSettings({ ...esRequest, body: settings }); - } catch (error) { - throw this._esWrapper.formatESError(error); - } finally { - await this._client.indices.open(esRequest); - } - - return null; - } - - /** - * Empties the content of a collection. Keep the existing mapping and settings. 
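// Non-dynamic settings (analyzers, for instance) can only be changed on a closed
// index, hence the close / putSettings / open sequence used by updateSettings
// above. A minimal sketch with the v8 client; the alias name and analyzer
// definition are illustrative.
import { Client } from "@elastic/elasticsearch";

const client = new Client({ node: "http://localhost:9200" });

async function updateAnalyzers(): Promise<void> {
  const alias = "@&nepali.liia"; // illustrative alias name

  await client.indices.close({ index: alias });

  try {
    await client.indices.putSettings({
      index: alias,
      settings: {
        analysis: {
          analyzer: {
            lowercase_keyword: {
              type: "custom",
              tokenizer: "keyword",
              filter: ["lowercase"],
            },
          },
        },
      },
    });
  } finally {
    // Reopen the index even if the settings update failed.
    await client.indices.open({ index: alias });
  }
}

updateAnalyzers().catch(console.error);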
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise} - */ - async truncateCollection(index: string, collection: string) { - let mappings; - let settings; - - const esRequest = { - index: await this._getIndice(index, collection), - }; - - try { - mappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - settings = await this._getSettings(esRequest); - settings = { - ...settings, - ...this.getAllowedIndexSettings(settings), - }; - await this._client.indices.delete(esRequest); - - await this._client.indices.create({ - ...esRequest, - aliases: { - [this._getAlias(index, collection)]: {}, - }, - mappings, - settings, - wait_for_active_shards: await this._getWaitForActiveShards(), - }); - - return null; - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Runs several action and document - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents to import - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.<{ items, errors }> - */ - async import( - index: string, - collection: string, - documents: JSONObject[], - { - refresh, - timeout, - userId = null, - }: { - refresh?: boolean | "wait_for"; - timeout?: string; - userId?: string; - } = {} - ) { - const alias = this._getAlias(index, collection); - const dateNow = Date.now(); - const esRequest: estypes.BulkRequest = { - operations: documents, - refresh, - timeout, - }; - - const kuzzleMeta = { - created: { - author: getKuid(userId), - createdAt: dateNow, - updatedAt: null, - updater: null, - }, - updated: { - updatedAt: dateNow, - updater: getKuid(userId), - }, - }; - - assertWellFormedRefresh(esRequest); - this._scriptCheck(documents); - - this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta); - - let body: estypes.BulkResponse; - - try { - body = await this._client.bulk(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - const result = { - errors: [], - items: [], - }; - - let idx = 0; - - /** - * @warning Critical code section - * - * bulk body can contain more than 10K elements - */ - for (let i = 0; i < body.items.length; i++) { - const row = body.items[i]; - const action = Object.keys(row)[0]; - const item = row[action]; - - if (item.status >= 400) { - const error: KImportError = { - _id: item._id, - status: item.status, - }; - - // update action contain body in "doc" field - // the delete action is not followed by an action payload - if (action === "update") { - error._source = documents[idx + 1].doc; - error._source._kuzzle_info = undefined; - } else if (action !== "delete") { - error._source = documents[idx + 1]; - error._source._kuzzle_info = undefined; - } - - // ES response does not systematicaly include an error object - // (e.g. delete action with 404 status) - if (item.error) { - error.error = { - reason: item.error.reason, - type: item.error.type, - }; - } - - result.errors.push({ [action]: error }); - } else { - result.items.push({ - [action]: { - _id: item._id, - status: item.status, - }, - }); - } - - // the delete action is not followed by an action payload - idx = action === "delete" ? 
idx + 1 : idx + 2; - } - /* end critical code section */ - - return result; - } - - /** - * Retrieves the complete list of existing collections in the current index - * - * @param {String} index - Index name - * @param {Object.Boolean} includeHidden - Optional: include HIDDEN_COLLECTION in results - * - * @returns {Promise.} Collection names - */ - async listCollections(index, { includeHidden = false } = {}) { - let body: estypes.CatAliasesResponse; - - try { - body = await this._client.cat.aliases({ format: "json" }); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - const aliases = body.map(({ alias }) => alias); - - const schema = this._extractSchema(aliases, { includeHidden }); - - return schema[index] || []; - } - - /** - * Retrieves the complete list of indexes - * - * @returns {Promise.} Index names - */ - async listIndexes() { - let body: estypes.CatAliasesResponse; - - try { - body = await this._client.cat.aliases({ format: "json" }); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - const aliases = body.map(({ alias }) => alias); - - const schema = this._extractSchema(aliases); - - return Object.keys(schema); - } - - /** - * Returns an object containing the list of indexes and collections - * - * @returns {Object.} Object - */ - async getSchema() { - let body: estypes.CatAliasesResponse; - - try { - body = await this._client.cat.aliases({ format: "json" }); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - const aliases = body.map(({ alias }) => alias); - - const schema = this._extractSchema(aliases, { includeHidden: true }); - - for (const [index, collections] of Object.entries(schema)) { - schema[index] = (collections as string[]).filter( - (c) => c !== HIDDEN_COLLECTION - ); - } - - return schema; - } - - /** - * Retrieves the complete list of aliases - * - * @returns {Promise.} [ { alias, index, collection, indice } ] - */ - async listAliases() { - let body: estypes.CatAliasesResponse; - - try { - body = await this._client.cat.aliases({ format: "json" }); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - const aliases = []; - - for (const { alias, index: indice } of body) { - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix) { - aliases.push({ - alias, - collection: this._extractCollection(alias), - index: this._extractIndex(alias), - indice, - }); - } - } - return aliases; - } - - /** - * Deletes a collection - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise} - */ - async deleteCollection(index: string, collection: string): Promise { - const indice = await this._getIndice(index, collection); - const esRequest: estypes.IndicesDeleteRequest = { - index: indice, - }; - - try { - await this._client.indices.delete(esRequest); - const alias = this._getAlias(index, collection); - - if (await this._checkIfAliasExists(alias)) { - await this._client.indices.deleteAlias({ - index: indice, - name: alias, - }); - } - - await this._createHiddenCollection(index); - } catch (e) { - throw this._esWrapper.formatESError(e); - } - - return null; - } - - /** - * Deletes multiple indexes - * - * @param {String[]} indexes - Index names - * - * @returns {Promise.} - */ - async deleteIndexes(indexes: string[] = []) { - if (indexes.length === 0) { - return Bluebird.resolve([]); - } - const deleted = new Set(); - - try { - const body = await this._client.cat.aliases({ format: "json" }); - - const esRequest = body.reduce( - 
(request, { alias, index: indice }) => { - const index = this._extractIndex(alias); - - if ( - alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || - !indexes.includes(index) - ) { - return request; - } - - deleted.add(index); - request.index.push(indice); - - return request; - }, - { index: [] } - ); - - if (esRequest.index.length === 0) { - return []; - } - - debug("Delete indexes: %o", esRequest); - - await this._client.indices.delete(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - return Array.from(deleted); - } - - /** - * Deletes an index - * - * @param {String} index - Index name - * - * @returns {Promise} - */ - async deleteIndex(index: string): Promise { - await this.deleteIndexes([index]); - - return null; - } - - /** - * Forces a refresh on the collection. - * - * /!\ Can lead to some performance issues. - * cf https://www.elastic.co/guide/en/elasticsearch/guide/current/near-real-time.html for more details - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.} { _shards } - */ - async refreshCollection(index: string, collection: string) { - const esRequest: estypes.IndicesRefreshRequest = { - index: this._getAlias(index, collection), - }; - - let body: estypes.IndicesRefreshResponse; - - try { - body = await this._client.indices.refresh(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - - return body; - } - - /** - * Returns true if the document exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document ID - * - * @returns {Promise.} - */ - async exists( - index: string, - collection: string, - id: string - ): Promise { - const esRequest: estypes.ExistsRequest = { - id, - index: this._getAlias(index, collection), - }; - - try { - return await this._client.exists(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Returns the list of documents existing with the ids given in the body param - * NB: Due to internal Kuzzle mechanism, can only be called on a single - * index/collection, using the body { ids: [.. } syntax. 
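// With the v8 client, the `exists`-style APIs resolve directly to a boolean
// instead of a wrapped `{ body }` response, as the exists() method above relies
// on. A small hedged sketch; names are illustrative.
import { Client } from "@elastic/elasticsearch";

const client = new Client({ node: "http://localhost:9200" });

async function checkDocument(): Promise<void> {
  const alias = "@&nepali.liia"; // illustrative alias name

  // Make recently indexed documents visible to the check (costly, use sparingly).
  await client.indices.refresh({ index: alias });

  const found: boolean = await client.exists({ index: alias, id: "liia-1" });
  console.log(found ? "document exists" : "document not found");
}

checkDocument().catch(console.error);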
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Document IDs - * - * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} - */ - async mExists(index: string, collection: string, ids: string[]) { - if (ids.length === 0) { - return { errors: [], item: [] }; - } - - const esRequest: estypes.MgetRequest = { - _source: "false", - docs: ids.map((_id) => ({ _id })), - index: this._getAlias(index, collection), - }; - - debug("mExists: %o", esRequest); - - let body: estypes.MgetResponse; - - try { - body = await this._client.mget(esRequest); // NOSONAR - } catch (e) { - throw this._esWrapper.formatESError(e); - } - - const errors = []; - const items = []; - - for (let i = 0; i < body.docs.length; i++) { - const doc = body.docs[i]; - - if (!("error" in doc) && doc.found) { - items.push(doc._id); - } else { - errors.push(doc._id); - } - } - - return { errors, items }; - } - - /** - * Returns true if the index exists - * - * @param {String} index - Index name - * - * @returns {Promise.} - */ - async hasIndex(index: string): Promise { - const indexes = await this.listIndexes(); - - return indexes.some((idx) => idx === index); - } - - /** - * Returns true if the collection exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.} - */ - async hasCollection(index: string, collection: string): Promise { - const collections = await this.listCollections(index); - - return collections.some((col: string) => col === collection); - } - - /** - * Returns true if the index has the hidden collection - * - * @param {String} index - Index name - * - * @returns {Promise.} - */ - async _hasHiddenCollection(index) { - const collections = await this.listCollections(index, { - includeHidden: true, - }); - - return collections.some((col) => col === HIDDEN_COLLECTION); - } - - /** - * Creates multiple documents at once. - * If a content has no id, one is automatically generated and assigned to it. - * If a content has a specified identifier, it is rejected if it already exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mCreate( - index: string, - collection: string, - documents: JSON[], - { - refresh, - timeout, - userId = null, - }: { - refresh?: boolean | "wait_for"; - timeout?: string; - userId?: string; - } = {} - ) { - const alias = this._getAlias(index, collection), - kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }, - { rejected, extractedDocuments, documentsToGet } = - this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); - - // prepare the mget request, but only for document having a specified id - const body = - documentsToGet.length > 0 - ? 
await this._client.mget({ - docs: documentsToGet, - index: alias, - }) - : { docs: [] }; - - const existingDocuments = body.docs; - const esRequest: estypes.BulkRequest = { - index: alias, - operations: [], - refresh, - timeout, - }; - const toImport = []; - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0, idx = 0; i < extractedDocuments.length; i++) { - const document = extractedDocuments[i]; - - // Documents are retrieved in the same order than we got them from user - if (typeof document._id === "string" && existingDocuments[idx]) { - const doc = existingDocuments[idx]; - - if (!("error" in doc) && doc.found) { - document._source._kuzzle_info = undefined; - - rejected.push({ - document: { - _id: document._id, - body: document._source, - }, - reason: "document already exists", - status: 400, - }); - } else { - esRequest.operations.push({ - index: { - _id: document._id, - _index: alias, - }, - }); - esRequest.operations.push(document._source); - - toImport.push(document); - } - idx++; - } else { - esRequest.operations.push({ index: { _index: alias } }); - esRequest.operations.push(document._source); - - toImport.push(document); - } - } - /* end critical code section */ - - return this._mExecute(esRequest, toImport, rejected); - } - - /** - * Creates or replaces multiple documents at once. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null), injectKuzzleMeta (false), limits (true) - * - * @returns {Promise.<{ items, errors }> - */ - async mCreateOrReplace( - index: string, - collection: string, - documents: JSONObject[], - { - refresh, - timeout, - userId = null, - injectKuzzleMeta = true, - limits = true, - source = true, - }: KRequestParams = {} - ) { - let kuzzleMeta = {}; - - if (injectKuzzleMeta) { - kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }; - } - - const alias = this._getAlias(index, collection); - const esRequest: estypes.BulkRequest = { - index: alias, - operations: [], - refresh, - timeout, - }; - const { rejected, extractedDocuments } = this._extractMDocuments( - documents, - kuzzleMeta - ); - - esRequest.operations = []; - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.operations.push({ - index: { - _id: extractedDocuments[i]._id, - _index: alias, - }, - }); - esRequest.operations.push(extractedDocuments[i]._source); - } - /* end critical code section */ - - return this._mExecute(esRequest, extractedDocuments, rejected, { - limits, - source, - }); - } - - /** - * Updates multiple documents with one request - * Replacements are rejected if targeted documents do not exist - * (like with the normal "update" method) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), retryOnConflict (0), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mUpdate( - index: string, - collection: string, - documents: JSONObject[], - { - refresh = undefined, - retryOnConflict = 0, - timeout = undefined, - userId = null, - } = {} - ) { - const alias = this._getAlias(index, collection), - toImport 
= [], - esRequest: estypes.BulkRequest = { - index: alias, - operations: [], - refresh, - timeout, - }, - kuzzleMeta = { - _kuzzle_info: { - updatedAt: Date.now(), - updater: getKuid(userId), - }, - }, - { rejected, extractedDocuments } = this._extractMDocuments( - documents, - kuzzleMeta - ); - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - const extractedDocument = extractedDocuments[i]; - - if (typeof extractedDocument._id === "string") { - esRequest.operations.push({ - update: { - _id: extractedDocument._id, - _index: alias, - retry_on_conflict: - retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }, - }); - - // _source: true => makes ES return the updated document source in the - // response. Required by the real-time notifier component - esRequest.operations.push({ - _source: true, - doc: extractedDocument._source, - }); - toImport.push(extractedDocument); - } else { - extractedDocument._source._kuzzle_info = undefined; - - rejected.push({ - document: { - _id: extractedDocument._id, - body: extractedDocument._source, - }, - reason: "document _id must be a string", - status: 400, - }); - } - } - /* end critical code section */ - - const response = await this._mExecute(esRequest, toImport, rejected); - - // with _source: true, ES returns the updated document in - // response.result.get._source - // => we replace response.result._source with it so that the notifier - // module can seamlessly process all kind of m* response* - response.items = response.items.map((item) => ({ - _id: item._id, - _source: item.get._source, - _version: item._version, - status: item.status, - })); - - return response; - } - - /** - * Creates or replaces multiple documents at once. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - refresh (undefined), retryOnConflict (0), timeout (undefined), userId (null) - * - * @returns {Promise.<{ items, errors }> - */ - async mUpsert( - index: string, - collection: string, - documents: JSONObject[], - { - refresh, - retryOnConflict = 0, - timeout, - userId = null, - }: { - refresh?: boolean | "wait_for"; - retryOnConflict?: number; - timeout?: string; - userId?: string; - } = {} - ) { - const alias = this._getAlias(index, collection); - const esRequest: estypes.BulkRequest = { - operations: [], - refresh, - timeout, - }; - - const user = getKuid(userId); - const now = Date.now(); - const kuzzleMeta = { - doc: { - _kuzzle_info: { - updatedAt: now, - updater: user, - }, - }, - upsert: { - _kuzzle_info: { - author: user, - createdAt: now, - }, - }, - }; - - const { rejected, extractedDocuments } = this._extractMDocuments( - documents, - kuzzleMeta, - { - prepareMUpsert: true, - requireId: true, - } - ); - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.operations.push( - { - update: { - _id: extractedDocuments[i]._id, - _index: alias, - _source: true, - retry_on_conflict: - retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }, - }, - { - doc: extractedDocuments[i]._source.changes, - upsert: extractedDocuments[i]._source.default, - } - ); - // _source: true - // Makes ES return the updated document source in the response. 
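// A hedged sketch of the flattened bulk `operations` array used above for the m*
// upserts: each update action line is followed by its payload line, and
// `_source: true` in the payload asks Elasticsearch to return the updated source.
// Alias, ids and documents are illustrative.
import { Client } from "@elastic/elasticsearch";

const client = new Client({ node: "http://localhost:9200" });

async function bulkUpsert(): Promise<void> {
  const alias = "@&nepali.liia"; // illustrative alias name

  const response = await client.bulk({
    refresh: "wait_for",
    operations: [
      { update: { _index: alias, _id: "liia-1", retry_on_conflict: 3 } },
      { doc: { city: "Kathmandu" }, upsert: { city: "Kathmandu" }, _source: true },
      { update: { _index: alias, _id: "liia-2", retry_on_conflict: 3 } },
      { doc: { city: "Pokhara" }, upsert: { city: "Pokhara" }, _source: true },
    ],
  });

  for (const item of response.items) {
    const result = item.update;
    console.log(result?._id, result?.status, result?.get?._source);
  }
}

bulkUpsert().catch(console.error);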
- // Required by the real-time notifier component - } - /* end critical code section */ - - const response = await this._mExecute( - esRequest, - extractedDocuments, - rejected - ); - - // with _source: true, ES returns the updated document in - // response.result.get._source - // => we replace response.result._source with it so that the notifier - // module can seamlessly process all kind of m* response* - response.items = response.items.map((item) => ({ - _id: item._id, - _source: item.get._source, - _version: item._version, - created: item.result === "created", // Needed by the notifier - status: item.status, - })); - - return response; - } - - /** - * Replaces multiple documents at once. - * Replacements are rejected if targeted documents do not exist - * (like with the normal "replace" method) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mReplace( - index: string, - collection: string, - documents: JSONObject[], - { - refresh, - timeout, - userId = null, - }: { - refresh?: boolean | "wait_for"; - timeout?: string; - userId?: string; - } = {} - ) { - const alias = this._getAlias(index, collection), - kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }, - { rejected, extractedDocuments, documentsToGet } = - this._extractMDocuments(documents, kuzzleMeta, { - prepareMGet: true, - requireId: true, - }); - - if (documentsToGet.length < 1) { - return { errors: rejected, items: [] }; - } - - const body = await this._client.mget({ - docs: documentsToGet, - index: alias, - }); - - const existingDocuments = body.docs; - const esRequest: estypes.BulkRequest = { - operations: [], - refresh, - timeout, - }; - const toImport = []; - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - const document = extractedDocuments[i]; - - // Documents are retrieved in the same order than we got them from user - const doc = existingDocuments[i]; - - if (!("error" in doc) && doc?.found) { - esRequest.operations.push({ - index: { - _id: document._id, - _index: alias, - }, - }); - esRequest.operations.push(document._source); - - toImport.push(document); - } else { - document._source._kuzzle_info = undefined; - - rejected.push({ - document: { - _id: document._id, - body: document._source, - }, - reason: "document not found", - status: 404, - }); - } - } - /* end critical code section */ - - return this._mExecute(esRequest, toImport, rejected); - } - - /** - * Deletes multiple documents with one request - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Documents IDs - * @param {Object} options - timeout (undefined), refresh (undefined) - * - * @returns {Promise.<{ documents, errors }> - */ - async mDelete( - index: string, - collection: string, - ids: string[], - { - refresh, - }: { - refresh?: boolean | "wait_for"; - timeout?: number; - } = {} - ) { - const query = { ids: { values: [] } }; - const validIds = []; - const partialErrors = []; - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < ids.length; i++) { - const _id = ids[i]; - - if (typeof _id === "string") { - 
validIds.push(_id); - } else { - partialErrors.push({ - _id, - reason: "document _id must be a string", - status: 400, - }); - } - } - /* end critical code section */ - await this.refreshCollection(index, collection); - - const { items } = await this.mGet(index, collection, validIds); - - let idx = 0; - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < validIds.length; i++) { - const validId = validIds[i]; - const item = items[idx]; - - if (item && item._id === validId) { - query.ids.values.push(validId); - idx++; - } else { - partialErrors.push({ - _id: validId, - reason: "document not found", - status: 404, - }); - } - } - /* end critical code section */ - - // @todo duplicated query to get documents body, mGet here and search in - // deleteByQuery - const { documents } = await this.deleteByQuery(index, collection, query, { - refresh, - }); - - return { documents, errors: partialErrors }; - } - - /** - * Executes an ES request prepared by mcreate, mupdate, mreplace, mdelete or mwriteDocuments - * Returns a standardized ES response object, containing the list of - * successfully performed operations, and the rejected ones - * - * @param {Object} esRequest - Elasticsearch request - * @param {Object[]} documents - Document sources (format: {_id, _source}) - * @param {Object[]} partialErrors - pre-rejected documents - * @param {Object} options - limits (true) - * - * @returns {Promise.} results - */ - async _mExecute( - esRequest: estypes.BulkRequest, - documents: JSONObject[], - partialErrors: JSONObject[] = [], - { limits = true, source = true } = {} - ) { - assertWellFormedRefresh(esRequest); - - if (this._hasExceededLimit(limits, documents)) { - return kerror.reject("services", "storage", "write_limit_exceeded"); - } - - let body = { items: [] }; - - if (documents.length > 0) { - try { - body = await this._client.bulk(esRequest); - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - const successes = []; - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < body.items.length; i++) { - const item = body.items[i]; - const result = item[Object.keys(item)[0]]; - - if (result.status >= 400) { - if (result.status === 404) { - partialErrors.push({ - document: { - _id: documents[i]._id, - body: documents[i]._source, - }, - reason: "document not found", - status: result.status, - }); - } else { - partialErrors.push({ - document: documents[i], - reason: result.error.reason, - status: result.status, - }); - } - } else { - successes.push({ - _id: result._id, - _source: source ? 
documents[i]._source : undefined, - _version: result._version, - created: result.result === "created", - get: result.get, - result: result.result, - status: result.status, // used by mUpdate to get the full document body - }); - } - } - /* end critical code section */ - - return { - errors: partialErrors, // @todo rename items to documents - items: successes, - }; - } - - /** - * Extracts, injects metadata and validates documents contained - * in a Request - * - * Used by mCreate, mUpdate, mUpsert, mReplace and mCreateOrReplace - * - * @param {Object[]} documents - Documents - * @param {Object} metadata - Kuzzle metadata - * @param {Object} options - prepareMGet (false), requireId (false) - * - * @returns {Object} { rejected, extractedDocuments, documentsToGet } - */ - _extractMDocuments( - documents: JSONObject[], - metadata: JSONObject, - { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} - ) { - const rejected = []; - const extractedDocuments = []; - const documentsToGet = []; - - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < documents.length; i++) { - const document = documents[i]; - - if (!isPlainObject(document.body) && !prepareMUpsert) { - rejected.push({ - document, - reason: "document body must be an object", - status: 400, - }); - } else if (!isPlainObject(document.changes) && prepareMUpsert) { - rejected.push({ - document, - reason: "document changes must be an object", - status: 400, - }); - } else if ( - prepareMUpsert && - document.default && - !isPlainObject(document.default) - ) { - rejected.push({ - document, - reason: "document default must be an object", - status: 400, - }); - } else if (requireId && typeof document._id !== "string") { - rejected.push({ - document, - reason: "document _id must be a string", - status: 400, - }); - } else { - this._processExtract( - prepareMUpsert, - prepareMGet, - metadata, - document, - extractedDocuments, - documentsToGet - ); - } - } - /* end critical code section */ - - return { documentsToGet, extractedDocuments, rejected }; - } - - private _hasExceededLimit(limits: boolean, documents: JSONObject[]) { - return ( - limits && - documents.length > global.kuzzle.config.limits.documentsWriteCount - ); - } - - private _processExtract( - prepareMUpsert: boolean, - prepareMGet: boolean, - metadata: JSONObject, - document: JSONObject, - extractedDocuments: JSONObject[], - documentsToGet: JSONObject[] - ) { - let extractedDocument; - - if (prepareMUpsert) { - extractedDocument = { - _source: { - // Do not use destructuring, it's 10x slower - changes: Object.assign({}, metadata.doc, document.changes), - default: Object.assign( - {}, - metadata.upsert, - document.changes, - document.default - ), - }, - }; - } else { - extractedDocument = { - // Do not use destructuring, it's 10x slower - _source: Object.assign({}, metadata, document.body), - }; - } - - if (document._id) { - extractedDocument._id = document._id; - } - - extractedDocuments.push(extractedDocument); - - if (prepareMGet && typeof document._id === "string") { - documentsToGet.push({ - _id: document._id, - _source: false, - }); - } - } - - /** - * Throws an error if the provided mapping is invalid - * - * @param {Object} mapping - * @throws - */ - _checkMappings(mapping: JSONObject, path = [], check = true) { - const properties = Object.keys(mapping); - const mappingProperties = - path.length === 0 - ? 
ROOT_MAPPING_PROPERTIES - : [...ROOT_MAPPING_PROPERTIES, ...CHILD_MAPPING_PROPERTIES]; - - for (const property of properties) { - if (check && !mappingProperties.includes(property)) { - const currentPath = [...path, property].join("."); - - throw kerror.get( - "services", - "storage", - "invalid_mapping", - currentPath, - didYouMean(property, mappingProperties) - ); - } - - if (property === "properties") { - // type definition level, we don't check - this._checkMappings(mapping[property], [...path, "properties"], false); - } else if (mapping[property]?.properties) { - // root properties level, check for "properties", "dynamic" and "_meta" - this._checkMappings(mapping[property], [...path, property], true); - } - } - } - - /** - * Given index + collection, returns the associated alias name. - * Prefer this function to `_getIndice` and `_getAvailableIndice` whenever it is possible. - * - * @param {String} index - * @param {String} collection - * - * @returns {String} Alias name (eg: '@&nepali.liia') - */ - _getAlias(index, collection) { - return `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; - } - - /** - * Given an alias name, returns the associated index name. - */ - async _checkIfAliasExists(aliasName) { - return this._client.indices.existsAlias({ - name: aliasName, - }); - } - - /** - * Given index + collection, returns the associated indice name. - * Use this function if ES does not accept aliases in the request. Otherwise use `_getAlias`. - * - * @param {String} index - * @param {String} collection - * - * @returns {String} Indice name (eg: '&nepali.liia') - * @throws If there is not exactly one indice associated - */ - async _getIndice(index: string, collection: string): Promise { - const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; - const body = await this._client.cat.aliases({ - format: "json", - name: alias, - }); - - if (body.length < 1) { - throw kerror.get("services", "storage", "unknown_index_collection"); - } else if (body.length > 1) { - throw kerror.get( - "services", - "storage", - "multiple_indice_alias", - `"alias" starting with "${ALIAS_PREFIX}"`, - '"indices"' - ); - } - - return body[0].index; - } - - /** - * Given an ES Request returns the settings of the corresponding indice. - * - * @param esRequest the ES Request with wanted settings. - * @return {Promise<*>} the settings of the indice. - * @private - */ - async _getSettings( - esRequest: estypes.IndicesGetSettingsRequest - ): Promise { - const response = await this._client.indices.getSettings(esRequest); - const index = esRequest.index as string; - - return response[index].settings; - } - - /** - * Given index + collection, returns an available indice name. - * Use this function when creating the associated indice. Otherwise use `_getAlias`. 
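// Resolving the concrete indice behind a Kuzzle alias, in the spirit of
// `_getIndice` above. In the v8 client, `cat.aliases` resolves directly to an
// array of rows when `format: "json"` is used. The alias name is illustrative.
import { Client } from "@elastic/elasticsearch";

const client = new Client({ node: "http://localhost:9200" });

async function resolveIndice(): Promise<void> {
  const alias = "@&nepali.liia"; // illustrative alias name

  const rows = await client.cat.aliases({ format: "json", name: alias });

  if (rows.length !== 1) {
    throw new Error(`Expected exactly one indice behind "${alias}", got ${rows.length}`);
  }

  console.log("indice:", rows[0].index);
}

resolveIndice().catch(console.error);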
- * - * @param {String} index - * @param {String} collection - * - * @returns {String} Available indice name (eg: '&nepali.liia2') - */ - async _getAvailableIndice( - index: string, - collection: string - ): Promise { - let indice = this._getAlias(index, collection).substring( - INDEX_PREFIX_POSITION_IN_ALIAS - ); - - if (!(await this._client.indices.exists({ index: indice }))) { - return indice; - } - - let notAvailable; - let suffix; - do { - suffix = `.${randomNumber(100000)}`; - - const overflow = Buffer.from(indice + suffix).length - 255; - if (overflow > 0) { - const indiceBuffer = Buffer.from(indice); - indice = indiceBuffer - .subarray(0, indiceBuffer.length - overflow) - .toString(); - } - - notAvailable = await this._client.indices.exists({ - index: indice + suffix, - }); - } while (notAvailable); - - return indice + suffix; - } - - /** - * Given an indice, returns the associated alias name. - * - * @param {String} indice - * - * @returns {String} Alias name (eg: '@&nepali.liia') - * @throws If there is not exactly one alias associated that is prefixed with @ - */ - async _getAliasFromIndice(indice) { - const body = await this._client.indices.getAlias({ index: indice }); - const aliases = Object.keys(body[indice].aliases).filter((alias) => - alias.startsWith(ALIAS_PREFIX) - ); - - if (aliases.length < 1) { - throw kerror.get("services", "storage", "unknown_index_collection"); - } - - return aliases; - } - - /** - * Check for each indice whether it has an alias or not. - * When the latter is missing, create one based on the indice name. - * - * This check avoids a breaking change for those who were using Kuzzle before - * alias attribution for each indice turned into a standard (appear in 2.14.0). - */ - async generateMissingAliases() { - try { - const body = await this._client.cat.indices({ format: "json" }); - const indices = body.map(({ index: indice }) => indice); - const aliases = await this.listAliases(); - - const indicesWithoutAlias = indices.filter( - (indice) => - indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice) - ); - - const esRequest = { body: { actions: [] } }; - for (const indice of indicesWithoutAlias) { - esRequest.body.actions.push({ - add: { alias: `${ALIAS_PREFIX}${indice}`, index: indice }, - }); - } - - if (esRequest.body.actions.length > 0) { - await this._client.indices.updateAliases(esRequest); - } - } catch (error) { - throw this._esWrapper.formatESError(error); - } - } - - /** - * Throws if index or collection includes forbidden characters - * - * @param {String} index - * @param {String} collection - */ - _assertValidIndexAndCollection(index, collection = null) { - if (!this.isIndexNameValid(index)) { - throw kerror.get("services", "storage", "invalid_index_name", index); - } - - if (collection !== null && !this.isCollectionNameValid(collection)) { - throw kerror.get( - "services", - "storage", - "invalid_collection_name", - collection - ); - } - } - - /** - * Given an alias, extract the associated index. - * - * @param {String} alias - * - * @returns {String} Index name - */ - _extractIndex(alias) { - return alias.substr( - INDEX_PREFIX_POSITION_IN_ALIAS + 1, - alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1 - ); - } - - /** - * Given an alias, extract the associated collection. 
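// Attaching an alias to an existing indice, in the spirit of
// `generateMissingAliases` above. With the v8 client the `actions` array can be
// passed at the top level of the request. Indice and alias names are illustrative.
import { Client } from "@elastic/elasticsearch";

const client = new Client({ node: "http://localhost:9200" });

async function addMissingAlias(): Promise<void> {
  await client.indices.updateAliases({
    actions: [
      { add: { index: "&nepali.liia", alias: "@&nepali.liia" } }, // illustrative names
    ],
  });
}

addMissingAlias().catch(console.error);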
- * - * @param {String} alias - * - * @returns {String} Collection name - */ - _extractCollection(alias) { - const separatorPos = alias.indexOf(NAME_SEPARATOR); - - return alias.substr(separatorPos + 1, alias.length); - } - - /** - * Given aliases, extract indexes and collections. - * - * @param {Array.} aliases - * @param {Object.Boolean} includeHidden Only refers to `HIDDEN_COLLECTION` occurences. An empty index will still be listed. Default to `false`. - * - * @returns {Object.} Indexes as key and an array of their collections as value - */ - _extractSchema(aliases, { includeHidden = false } = {}) { - const schema = {}; - - for (const alias of aliases) { - const [indexName, collectionName] = alias - .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) - .split(NAME_SEPARATOR); - - if ( - alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix && - (collectionName !== HIDDEN_COLLECTION || includeHidden) - ) { - if (!schema[indexName]) { - schema[indexName] = []; - } - - if (!schema[indexName].includes(collectionName)) { - schema[indexName].push(collectionName); - } - } - } - - return schema; - } - - /** - * Creates the hidden collection on the provided index if it does not already - * exists - * - * @param {String} index Index name - */ - async _createHiddenCollection(index) { - const mutex = new Mutex(`hiddenCollection/${index}`); - - try { - await mutex.lock(); - - if (await this._hasHiddenCollection(index)) { - return; - } - - const esRequest: estypes.IndicesCreateRequest = { - aliases: { - [this._getAlias(index, HIDDEN_COLLECTION)]: {}, - }, - index: await this._getAvailableIndice(index, HIDDEN_COLLECTION), - settings: { - number_of_replicas: this._config.defaultSettings.number_of_replicas, - number_of_shards: this._config.defaultSettings.number_of_shards, - }, - wait_for_active_shards: await this._getWaitForActiveShards(), - }; - - await this._client.indices.create(esRequest); - } catch (e) { - throw this._esWrapper.formatESError(e); - } finally { - await mutex.unlock(); - } - } - - /** - * We need to always wait for a minimal number of shards to be available - * before answering to the client. This is to avoid Elasticsearch node - * to return a 404 Not Found error when the client tries to index a - * document in the index. - * To find the best value for this setting, we need to take into account - * the number of nodes in the cluster and the number of shards per index. 
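To make the schema extraction concrete, a hedged example of the expected input/output shape of _extractSchema(), using the alias convention from the examples above (alias names are illustrative):

    // Illustrative sketch (assumed values, not taken from this patch).
    const aliases = ["@&nepali.liia", "@&nepali.mehry", "@&mtp-open-data.red-taxi"];

    // With the default { includeHidden: false }, the expected result is:
    // {
    //   "nepali": ["liia", "mehry"],
    //   "mtp-open-data": ["red-taxi"]
    // }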
- */ - async _getWaitForActiveShards(): Promise { - const body = await this._client.cat.nodes({ format: "json" }); - - const numberOfNodes = body.length; - - if (numberOfNodes > 1) { - return "all"; - } - - return 1; - } - - /** - * Scroll indice in elasticsearch and return all document that match the filter - * /!\ throws a write_limit_exceed error: this method is intended to be used - * by deleteByQuery and updateByQuery - * - * @param {Object} esRequest - Search request body - * - * @returns {Promise.} resolve to an array of documents - */ - async _getAllDocumentsFromQuery(esRequest: estypes.SearchRequest) { - let { hits, _scroll_id } = await this._client.search(esRequest); - const totalHitsValue = this._getHitsTotalValue(hits); - - if (totalHitsValue > global.kuzzle.config.limits.documentsWriteCount) { - throw kerror.get("services", "storage", "write_limit_exceeded"); - } - - let documents = hits.hits.map((h: JSONObject) => ({ - _id: h._id, - _source: h._source, - body: {}, - })); - - while (totalHitsValue !== documents.length) { - ({ hits, _scroll_id } = await this._client.scroll({ - scroll: esRequest.scroll, - scroll_id: _scroll_id, - })); - - documents = documents.concat( - hits.hits.map((h: JSONObject) => ({ - _id: h._id, - _source: h._source, - body: {}, - })) - ); - } - - await this.clearScroll(_scroll_id); - - return documents; - } - - /** - * Clean and normalize the searchBody - * Ensure only allowed parameters are passed to ES - * - * @param {Object} searchBody - ES search body (with query, aggregations, sort, etc) - */ - _sanitizeSearchBody(searchBody) { - // Only allow a whitelist of top level properties - for (const key of Object.keys(searchBody)) { - if (searchBody[key] !== undefined && !this.searchBodyKeys.includes(key)) { - throw kerror.get("services", "storage", "invalid_search_query", key); - } - } - - // Ensure that the body does not include a script - this._scriptCheck(searchBody); - - // Avoid empty queries that causes ES to respond with an error. - // Empty queries are turned into match_all queries - if (_.isEmpty(searchBody.query)) { - searchBody.query = { match_all: {} }; - } - - return searchBody; - } - - /** - * Throw if a script is used in the query. - * - * Only Stored Scripts are accepted - * - * @param {Object} object - */ - _scriptCheck(object) { - for (const [key, value] of Object.entries(object)) { - if (this.scriptKeys.includes(key)) { - for (const scriptArg of Object.keys(value)) { - if (!this.scriptAllowedArgs.includes(scriptArg)) { - throw kerror.get( - "services", - "storage", - "invalid_query_keyword", - `${key}.${scriptArg}` - ); - } - } - } - // Every object must be checked here, even the ones nested into an array - else if (typeof value === "object" && value !== null) { - this._scriptCheck(value); - } - } - } - - /** - * Checks if a collection name is valid - * @param {string} name - * @returns {Boolean} - */ - isCollectionNameValid(name) { - return _isObjectNameValid(name); - } - - /** - * Checks if a collection name is valid - * @param {string} name - * @returns {Boolean} - */ - isIndexNameValid(name) { - return _isObjectNameValid(name); - } - - /** - * Clears an allocated scroll - * @param {[type]} id [description] - * @returns {[type]} [description] - */ - async clearScroll(id?: string) { - if (id) { - debug("clearing scroll: %s", id); - await this._client.clearScroll({ scroll_id: id }); - } - } - - /** - * Loads a configuration value from services.storageEngine and assert a valid - * ms format. 
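A hedged sketch of how the search-body sanitization above is meant to behave (the actual whitelist lives in this.searchBodyKeys and is not reproduced here):

    // Illustrative sketch (assumed values, not taken from this patch).
    // An empty query is normalized into match_all:
    this._sanitizeSearchBody({});
    // => { query: { match_all: {} } }

    // A body made of whitelisted top-level keys is returned as-is:
    this._sanitizeSearchBody({ query: { term: { city: "Antalya" } } });

    // A top-level key outside this.searchBodyKeys throws
    // services.storage.invalid_search_query, and inline scripts are rejected
    // by _scriptCheck() with services.storage.invalid_query_keyword.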
- * - * @param {String} key - relative path to the key in configuration - * - * @returns {Number} milliseconds - */ - _loadMsConfig(key) { - const configValue = _.get(this._config, key); - - assert( - typeof configValue === "string", - `services.storageEngine.${key} must be a string.` - ); - - const parsedValue = ms(configValue); - - assert( - typeof parsedValue === "number", - `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` - ); - - return parsedValue; - } - - /** - * Returns true if one of the mappings dynamic property changes value from - * false to true - */ - _dynamicChanges(previousMappings, newMappings) { - const previousValues = findDynamic(previousMappings); - - for (const [path, previousValue] of Object.entries(previousValues)) { - if (previousValue.toString() !== "false") { - continue; - } - - const newValue = _.get(newMappings, path); - - if (newValue && newValue.toString() !== "false") { - return true; - } - } - - return false; - } - - async waitForElasticsearch() { - if (esState !== esStateEnum.NONE) { - while (esState !== esStateEnum.OK) { - await Bluebird.delay(1000); - } - - return; - } - - esState = esStateEnum.AWAITING; - - global.kuzzle.log.info("[ℹ] Trying to connect to Elasticsearch..."); - - while (esState !== esStateEnum.OK) { - try { - // Wait for at least 1 shard to be initialized - const health = await this._client.cluster.health({ - wait_for_no_initializing_shards: true, - }); - - if (health.number_of_pending_tasks === 0) { - global.kuzzle.log.info("[✔] Elasticsearch is ready"); - esState = esStateEnum.OK; - } else { - global.kuzzle.log.info( - `[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining` - ); - await Bluebird.delay(1000); - } - } catch (e) { - await Bluebird.delay(1000); - } - } - } - - /** - * Checks if the dynamic properties are correct - */ - _checkDynamicProperty(mappings) { - const dynamicProperties = findDynamic(mappings); - for (const [path, value] of Object.entries(dynamicProperties)) { - // Prevent common mistake - if (typeof value === "boolean") { - _.set(mappings, path, value.toString()); - } else if (typeof value !== "string") { - throw kerror.get( - "services", - "storage", - "invalid_mapping", - path, - "Dynamic property value should be a string." - ); - } - - if (!DYNAMIC_PROPERTY_VALUES.includes(value.toString())) { - throw kerror.get( - "services", - "storage", - "invalid_mapping", - path, - `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join( - '", "' - )}"` - ); - } - } - } - - _setLastActionToKuzzleMeta( - esRequest: JSONObject, - alias: string, - kuzzleMeta: JSONObject - ) { - /** - * @warning Critical code section - * - * bulk body can contain more than 10K elements - */ - let lastAction = ""; - const actionNames = ["index", "create", "update", "delete"]; - - for (let i = 0; i < esRequest.operations.length; i++) { - const item = esRequest.operations[i]; - const action = Object.keys(item)[0]; - - if (actionNames.indexOf(action) !== -1) { - lastAction = action; - - item[action]._index = alias; - - if (item[action]?._type) { - item[action]._type = undefined; - } - } else if (lastAction === "index" || lastAction === "create") { - item._kuzzle_info = kuzzleMeta.created; - } else if (lastAction === "update") { - this._setLastActionToKuzzleMetaUpdate(item, kuzzleMeta); - } - } - /* end critical code section */ - } - - _setLastActionToKuzzleMetaUpdate(item: JSONObject, kuzzleMeta: JSONObject) { - for (const prop of ["doc", "upsert"]) { - if (isPlainObject(item[prop])) { - item[prop]._kuzzle_info = kuzzleMeta.updated; - } - } - } - - _getHitsTotalValue(hits: estypes.SearchHitsMetadata): number { - if (typeof hits.total === "number") { - return hits.total; - } - - return hits.total.value; + _initSequence() { + return this._client._initSequence(); } } - -/** - * Finds paths and values of mappings dynamic properties - * - * @example - * - * findDynamic(mappings); - * { - * "properties.metadata.dynamic": "true", - * "properties.user.properties.address.dynamic": "strict" - * } - */ -function findDynamic(mappings, path = [], results = {}) { - if (mappings.dynamic !== undefined) { - results[path.concat("dynamic").join(".")] = mappings.dynamic; - } - - for (const [key, value] of Object.entries(mappings)) { - if (isPlainObject(value)) { - findDynamic(value, path.concat(key), results); - } - } - - return results; -} - -/** - * Forbids the use of the _routing ES option - * - * @param {Object} esRequest - * @throws - */ -function assertNoRouting(esRequest) { - if (esRequest._routing) { - throw kerror.get("services", "storage", "no_routing"); - } -} - -/** - * Checks if the optional "refresh" argument is well-formed - * - * @param {Object} esRequest - * @throws - */ -function assertWellFormedRefresh(esRequest) { - if (!["wait_for", "false", false, undefined].includes(esRequest.refresh)) { - throw kerror.get( - "services", - "storage", - "invalid_argument", - "refresh", - '"wait_for", false' - ); - } -} - -function getKuid(userId: string): string | null { - if (!userId) { - return null; - } - - return String(userId); -} - -/** - * Checks if an index or collection name is valid - * - * @see https://www.elastic.co/guide/en/elasticsearch/reference/7.4/indices-create-index.html - * - * Beware of the length check: ES allows indice names up to 255 bytes, but since - * in Kuzzle we emulate collections as indices, we have to make sure - * that the privacy prefix, the index name, the separator and the collection - * name ALL fit within the 255-bytes limit of Elasticsearch. 
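The _getHitsTotalValue() helper above exists because, with the 8.x client typings, hits.total can be either a plain number or a { value, relation } object; a minimal sketch of the two accepted shapes (values are illustrative):

    // Illustrative sketch (assumed values, not taken from this patch).
    this._getHitsTotalValue({ hits: [], total: 42 });
    // => 42

    this._getHitsTotalValue({ hits: [], total: { relation: "eq", value: 42 } });
    // => 42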
The simplest way - * is to limit index and collection names to 126 bytes and document that - * limitation (prefix(1) + index(1..126) + sep(1) + collection(1..126) = 4..254) - * - * @param {string} name - * @returns {Boolean} - */ -function _isObjectNameValid(name: string): boolean { - if (typeof name !== "string" || name.length === 0) { - return false; - } - - if (name.toLowerCase() !== name) { - return false; - } - - if (Buffer.from(name).length > 126) { - return false; - } - - if (name === "_all") { - return false; - } - - let valid = true; - - for (let i = 0; valid && i < FORBIDDEN_CHARS.length; i++) { - valid = !name.includes(FORBIDDEN_CHARS[i]); - } - - return valid; -} - -// TODO: Remove this function when we move to Jest -// This is kept because we use an old ReRequire that use require() instead of import -module.exports = ElasticSearch; diff --git a/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts b/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts index 96cb65f645..c810429371 100644 --- a/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts +++ b/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts @@ -1,6 +1,9 @@ -import { ClientOptions } from "@elastic/elasticsearch"; - export type StorageEngineElasticsearch = { + /** + * Elasticsearch major version + * @default 7 + */ + majorVersion: 7 | 8; /** * @default ['storageEngine'] */ @@ -25,7 +28,7 @@ export type StorageEngineElasticsearch = { * } * */ - client: ClientOptions; + client: any; /** * Default policy against new fields that are not referenced in the diff --git a/lib/types/storage/7/Elasticsearch.js b/lib/types/storage/7/Elasticsearch.js new file mode 100644 index 0000000000..12de03811f --- /dev/null +++ b/lib/types/storage/7/Elasticsearch.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Elasticsearch.js.map \ No newline at end of file diff --git a/lib/types/storage/7/Elasticsearch.ts b/lib/types/storage/7/Elasticsearch.ts new file mode 100644 index 0000000000..492fca69ae --- /dev/null +++ b/lib/types/storage/7/Elasticsearch.ts @@ -0,0 +1,42 @@ +import { ByteSize, ClusterNodesStats } from "sdk-es7/api/types"; + +export type InfoResult = { + type: string; + version: string; + status?: string; + lucene?: string; + spaceUsed?: ByteSize; + nodes?: ClusterNodesStats; +}; + +export type KRequestBody = T & { + _kuzzle_info?: { + author: string; + createdAt: number; + updatedAt: number | null; + updater: string | null; + }; +}; + +export interface JSONObject { + [key: string]: any; +} + +export type KImportError = { + _id: string; + status: string; + _source?: JSONObject; + error?: { + reason: string; + type: string; + }; +}; + +export type KRequestParams = { + refresh?: boolean | "wait_for"; + timeout?: string; + userId?: string; + injectKuzzleMeta?: boolean; + limits?: boolean; + source?: boolean; +}; diff --git a/lib/types/storage/8/Elasticsearch.js b/lib/types/storage/8/Elasticsearch.js new file mode 100644 index 0000000000..12de03811f --- /dev/null +++ b/lib/types/storage/8/Elasticsearch.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Elasticsearch.js.map \ No newline at end of file diff --git a/lib/types/storage/Elasticsearch.ts b/lib/types/storage/8/Elasticsearch.ts similarity index 95% rename from lib/types/storage/Elasticsearch.ts rename to lib/types/storage/8/Elasticsearch.ts index 
f06f48f86f..4f188e9b11 100644 --- a/lib/types/storage/Elasticsearch.ts +++ b/lib/types/storage/8/Elasticsearch.ts @@ -1,4 +1,4 @@ -import { estypes } from "@elastic/elasticsearch"; +import { estypes } from "sdk-es8"; export type InfoResult = { type: string; diff --git a/package-lock.json b/package-lock.json index d4562aaf22..32129108f3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,6 @@ "version": "2.29.0", "license": "Apache-2.0", "dependencies": { - "@elastic/elasticsearch": "8.11.0", "aedes": "0.46.3", "bluebird": "3.7.2", "cli-color": "2.0.3", @@ -40,6 +39,8 @@ "passport": "0.7.0", "protobufjs": "7.2.5", "rc": "1.2.8", + "sdk-es7": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", + "sdk-es8": "npm:@elastic/elasticsearch@8.12.1", "semver": "7.5.4", "sorted-array": "2.0.4", "uuid": "9.0.1", @@ -1094,9 +1095,9 @@ } }, "node_modules/@elastic/elasticsearch": { - "version": "8.11.0", - "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.11.0.tgz", - "integrity": "sha512-1UEQFdGLuKdROLJnMTjegasRM3X9INm/PVADoIVgdTfuv6DeJ17UMuNwYSkCrLrC0trLjjGV4YganpbJJX/VLg==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.12.1.tgz", + "integrity": "sha512-/dJtxtvoN2vRXip6xUrEyzthhzVUOKL8L9YNq25HpMwqiqrJTK70/dOp6GM8oTVQ87UPyJBiiCxQY2+cvg2XWw==", "dependencies": { "@elastic/transport": "^8.4.0", "tslib": "^2.4.0" @@ -1353,6 +1354,12 @@ "node": ">=8" } }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", @@ -1887,9 +1894,9 @@ } }, "node_modules/@octokit/request": { - "version": "8.1.6", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.1.6.tgz", - "integrity": "sha512-YhPaGml3ncZC1NfXpP3WZ7iliL1ap6tLkAp6MvbK2fTTPytzVUyUesBBogcdMm86uRYO5rHaM1xIWxigWZ17MQ==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.2.0.tgz", + "integrity": "sha512-exPif6x5uwLqv1N1irkLG1zZNJkOtj8bZxuVHd71U5Ftuxf2wGNvAJyNBcPbPC+EBzwYEbBDdSFb8EPcjpYxPQ==", "dev": true, "peer": true, "dependencies": { @@ -2338,13 +2345,13 @@ } }, "node_modules/@semantic-release/github/node_modules/globby": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.0.tgz", - "integrity": "sha512-/1WM/LNHRAOH9lZta77uGbq0dAEQM+XjNesWwhlERDVenqothRbnzTrL3/LrIoEPPjeUHC3vrS6TwoyxeHs7MQ==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.1.tgz", + "integrity": "sha512-jOMLD2Z7MAhyG8aJpNOpmziMOP4rPLcc95oQPKXBazW82z+CEgPFBQvEpRUa1KeIMUJo4Wsm+q6uzO/Q/4BksQ==", "dev": true, "peer": true, "dependencies": { - "@sindresorhus/merge-streams": "^1.0.0", + "@sindresorhus/merge-streams": "^2.1.0", "fast-glob": "^3.3.2", "ignore": "^5.2.4", "path-type": "^5.0.0", @@ -2359,9 +2366,9 @@ } }, "node_modules/@semantic-release/github/node_modules/http-proxy-agent": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.0.tgz", - "integrity": "sha512-+ZT+iBxVUQ1asugqnD6oWoRiS25AkjNfG085dKJGtGxkdwLQrMKU5wJr2bOOFAXzKcTuqq+7fZlTMgG3SRfIYQ==", + "version": "7.0.1", + "resolved": 
"https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.1.tgz", + "integrity": "sha512-My1KCEPs6A0hb4qCVzYp8iEvA8j8YqcvXLZZH8C9OFuTYpYjHE7N2dtG3mRl1HMD4+VGXpF3XcDVcxGBT7yDZQ==", "dev": true, "peer": true, "dependencies": { @@ -2373,9 +2380,9 @@ } }, "node_modules/@semantic-release/github/node_modules/https-proxy-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz", - "integrity": "sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.3.tgz", + "integrity": "sha512-kCnwztfX0KZJSLOBrcL0emLeFako55NWMovvyPP2AjsghNk9RB1yjSI+jVumPHYZsNXegNoqupSW9IY3afSH8w==", "dev": true, "peer": true, "dependencies": { @@ -2872,9 +2879,9 @@ } }, "node_modules/@sindresorhus/merge-streams": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-1.0.0.tgz", - "integrity": "sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.2.0.tgz", + "integrity": "sha512-UTce8mUwUW0RikMb/eseJ7ys0BRkZVFB86orHzrfW12ZmFtym5zua8joZ4L7okH2dDFHkcFjqnZ5GocWBXOFtA==", "dev": true, "peer": true, "engines": { @@ -3078,9 +3085,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.11.10", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.10.tgz", - "integrity": "sha512-rZEfe/hJSGYmdfX9tvcPMYeYPW2sNl50nsw4jZmRcaG0HIAb0WYEpsB05GOb53vjqpyE9GUhlDQ4jLSoB5q9kg==", + "version": "20.11.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.17.tgz", + "integrity": "sha512-QmgQZGWu1Yw9TDyAP9ZzpFJKynYNeOvwMJmaxABfieQoVoiVOS6MN1WSpqpRcbeA5+RW82kraAVxCCJg+780Qw==", "dependencies": { "undici-types": "~5.26.4" } @@ -3108,9 +3115,9 @@ "dev": true }, "node_modules/@types/semver": { - "version": "7.5.6", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz", - "integrity": "sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==", + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.7.tgz", + "integrity": "sha512-/wdoPq1QqkSj9/QOeKkFquEuPzQbHTWAMPH/PaUMB+JuR31lXhlWXRZ52IpfDYVlDOUBvX09uBrPwxGT1hjNBg==", "dev": true }, "node_modules/@types/stack-utils": { @@ -3604,13 +3611,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/ansicolors": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz", - "integrity": "sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==", - "dev": true, - "peer": true - }, "node_modules/any-promise": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", @@ -3723,13 +3723,16 @@ "peer": true }, "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": 
"sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "optional": true, "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3751,17 +3754,18 @@ } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", - "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "optional": true, "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", "is-shared-array-buffer": "^1.0.2" }, "engines": { @@ -3894,9 +3898,9 @@ } }, "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.6.tgz", + "integrity": "sha512-j1QzY8iPNPG4o4xmO3ptzpRxTciqD3MgEHtifP/YnJpIo58Xu+ne4BejlbkuaLfXn/nz6HFiw29bLpj2PNMdGg==", "optional": true, "engines": { "node": ">= 0.4" @@ -4489,13 +4493,18 @@ } }, "node_modules/call-bind": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", - "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4537,9 +4546,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001581", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001581.tgz", - "integrity": "sha512-whlTkwhqV2tUmP3oYhtNfaWGYHDdS3JYFQBKXxcUR9qqPWsRhFHhoISO2Xnl/g0xyKzht9mI1LZpiNWfMzHixQ==", + "version": "1.0.30001587", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001587.tgz", + "integrity": "sha512-HMFNotUmLXn71BQxg8cijvqxnIAofforZOwGsxyXJ0qugTdspUF4sPSJ2vhgprHCB996tIDzEq1ubumPDV8ULA==", "dev": true, "funding": [ { @@ -4556,20 +4565,6 @@ } ] }, - "node_modules/cardinal": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/cardinal/-/cardinal-2.1.1.tgz", - "integrity": 
"sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==", - "dev": true, - "peer": true, - "dependencies": { - "ansicolors": "~0.3.2", - "redeyed": "~2.1.0" - }, - "bin": { - "cdl": "bin/cdl.js" - } - }, "node_modules/caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", @@ -4735,6 +4730,112 @@ "node": ">=8" } }, + "node_modules/cli-highlight": { + "version": "2.1.11", + "resolved": "https://registry.npmjs.org/cli-highlight/-/cli-highlight-2.1.11.tgz", + "integrity": "sha512-9KDcoEVwyUXrjcJNvHD0NFc/hiwe/WPVYIleQh2O1N2Zro5gWJZ/K+3DGn8w8P/F6FxOgzyC5bxDyHIgCSPhGg==", + "dev": true, + "peer": true, + "dependencies": { + "chalk": "^4.0.0", + "highlight.js": "^10.7.1", + "mz": "^2.4.0", + "parse5": "^5.1.1", + "parse5-htmlparser2-tree-adapter": "^6.0.0", + "yargs": "^16.0.0" + }, + "bin": { + "highlight": "bin/highlight" + }, + "engines": { + "node": ">=8.0.0", + "npm": ">=5.0.0" + } + }, + "node_modules/cli-highlight/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "peer": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/cli-highlight/node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "peer": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/cli-highlight/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-highlight/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "peer": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-highlight/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "peer": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/cli-highlight/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "peer": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + 
"string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/cli-spinners": { "version": "2.9.2", "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", @@ -4896,6 +4997,12 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/codecov/node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, "node_modules/collect-v8-coverage": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", @@ -5729,16 +5836,20 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.3.tgz", + "integrity": "sha512-h3GBouC+RPtNX2N0hHVLo2ZwPYurq8mLmXpOLTsw71gr7lHt5VaI4vVkDUNOfiWmm48JEXe3VM7PmLX45AMmmg==", "dependencies": { - "get-intrinsic": "^1.2.1", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-properties": { @@ -5968,6 +6079,12 @@ "safer-buffer": "^2.1.0" } }, + "node_modules/ecc-jsbn/node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true + }, "node_modules/ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", @@ -6112,9 +6229,9 @@ "optional": true }, "node_modules/electron-to-chromium": { - "version": "1.4.650", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.650.tgz", - "integrity": "sha512-sYSQhJCJa4aGA1wYol5cMQgekDBlbVfTRavlGZVr3WZpDdOPcp6a6xUnFfrt8TqZhsBYYbDxJZCjGfHuGupCRQ==", + "version": "1.4.667", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.667.tgz", + "integrity": "sha512-66L3pLlWhTNVUhnmSA5+qDM3fwnXsM6KAqE36e2w4KN0g6pkEtlT5bs41FQtQwVwKnfhNBXiWRLPs30HSxd7Kw==", "dev": true }, "node_modules/emittery": { @@ -6444,6 +6561,25 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-set-tostringtag": { "version": "2.0.2", "resolved": 
"https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", @@ -6526,9 +6662,9 @@ } }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", "engines": { "node": ">=6" } @@ -7124,9 +7260,9 @@ } }, "node_modules/fastq": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.0.tgz", - "integrity": "sha512-zGygtijUMT7jnk3h26kUms3BkSDp4IfIKjmnqI2tvx6nuBfiF1UqOxbnLfzdv+apBy+53oaImsKtMw/xYbW+1w==", + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", "dev": true, "dependencies": { "reusify": "^1.0.4" @@ -7503,6 +7639,20 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -7618,15 +7768,19 @@ "dev": true }, "node_modules/get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dependencies": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -7653,13 +7807,14 @@ } }, "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "optional": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" @@ -8040,11 +8195,11 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": 
"sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8073,12 +8228,12 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "optional": true, "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -8118,9 +8273,9 @@ } }, "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", + "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", "dependencies": { "function-bind": "^1.1.2" }, @@ -8187,6 +8342,16 @@ "node": ">=10" } }, + "node_modules/highlight.js": { + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", + "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "dev": true, + "peer": true, + "engines": { + "node": "*" + } + }, "node_modules/homedir-polyfill": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz", @@ -8242,6 +8407,11 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true }, + "node_modules/hpagent": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-0.1.2.tgz", + "integrity": "sha512-ePqFXHtSQWAFXYmj+JtOTHr84iNrII4/QRlAAPPE+zqnKy4xJo7Ie1Y4kC7AdB+LxLxSTTzBMASsEcy0q8YyvQ==" + }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -8368,9 +8538,9 @@ ] }, "node_modules/ignore": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz", - "integrity": "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", "dev": true, "engines": { "node": ">= 4" @@ -8634,12 +8804,12 @@ } }, "node_modules/internal-slot": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", - "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + 
"integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "optional": true, "dependencies": { - "get-intrinsic": "^1.2.2", + "es-errors": "^1.3.0", "hasown": "^2.0.0", "side-channel": "^1.0.4" }, @@ -8686,10 +8856,17 @@ "url": "https://opencollective.com/ioredis" } }, - "node_modules/ip": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", - "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==" + "node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } }, "node_modules/is-alphabetical": { "version": "1.0.4", @@ -8716,14 +8893,16 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "optional": true, "dependencies": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -9101,12 +9280,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", - "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "optional": true, "dependencies": { - "which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -9903,10 +10082,9 @@ } }, "node_modules/jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", - "dev": true + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==" }, "node_modules/jsesc": { "version": "2.5.2", @@ -10759,9 +10937,9 @@ } }, "node_modules/marked": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/marked/-/marked-11.2.0.tgz", - "integrity": "sha512-HR0m3bvu0jAPYiIvLUUQtdg1g6D247//lvcekpHO1WMvbwDlwSkZAX9Lw4F4YHE1T0HaaNve0tuAWuV1UJ6vtw==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-12.0.0.tgz", + "integrity": "sha512-Vkwtq9rLqXryZnWaQc86+FHLC6tr/fycMfYAhiOIXkrNmeGAyhSxjqu0Rs1i0bBqw5u0S7+lV9fdH2ZSVaoa0w==", "dev": true, "peer": true, "bin": { @@ -10772,15 +10950,15 @@ } }, "node_modules/marked-terminal": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-6.2.0.tgz", - 
"integrity": "sha512-ubWhwcBFHnXsjYNsu+Wndpg0zhY4CahSpPlA70PlO0rR9r2sZpkyU+rkCsOWH+KMEkx847UpALON+HWgxowFtw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-7.0.0.tgz", + "integrity": "sha512-sNEx8nn9Ktcm6pL0TnRz8tnXq/mSS0Q1FRSwJOAqw4lAB4l49UeDf85Gm1n9RPFm5qurCPjwi1StAQT2XExhZw==", "dev": true, "peer": true, "dependencies": { "ansi-escapes": "^6.2.0", - "cardinal": "^2.1.1", "chalk": "^5.3.0", + "cli-highlight": "^2.1.11", "cli-table3": "^0.6.3", "node-emoji": "^2.1.3", "supports-hyperlinks": "^3.0.0" @@ -10789,7 +10967,7 @@ "node": ">=16.0.0" }, "peerDependencies": { - "marked": ">=1 <12" + "marked": ">=1 <13" } }, "node_modules/marked-terminal/node_modules/@colors/colors": { @@ -11894,11 +12072,12 @@ } }, "node_modules/mqtt/node_modules/bl": { - "version": "6.0.10", - "resolved": "https://registry.npmjs.org/bl/-/bl-6.0.10.tgz", - "integrity": "sha512-F14DFhDZfxtVm2FY0k9kG2lWAwzZkO9+jX3Ytuoy/V0E1/5LBuBzzQHXAjqpxXEDIpmTPZZf5GVIGPQcLxFpaA==", + "version": "6.0.11", + "resolved": "https://registry.npmjs.org/bl/-/bl-6.0.11.tgz", + "integrity": "sha512-Ok/NWrEA0mlEEbWzckkZVLq6Nv1m2xZ+i9Jq5hZ9Ph/YEcP5dExqls9wUzpluhQRPzdeT8oZNOXAytta6YN8pQ==", "dev": true, "dependencies": { + "@types/readable-stream": "^4.0.0", "buffer": "^6.0.3", "inherits": "^2.0.4", "readable-stream": "^4.2.0" @@ -12128,9 +12307,9 @@ "dev": true }, "node_modules/nise": { - "version": "5.1.7", - "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.7.tgz", - "integrity": "sha512-wWtNUhkT7k58uvWTB/Gy26eA/EJKtPZFVAhEilN5UYVmmGRYOURbejRUyKm0Uu9XVEW7K5nBOZfR8VMB4QR2RQ==", + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.9.tgz", + "integrity": "sha512-qOnoujW4SV6e40dYxJOb3uvuoPHtmLzIk4TFo+j0jPJoC+5Z9xja5qH5JZobEPsa8+YYphMrOSwnrshEhG2qww==", "dev": true, "dependencies": { "@sinonjs/commons": "^3.0.0", @@ -16118,6 +16297,30 @@ "node": ">=0.10.0" } }, + "node_modules/parse5": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", + "dev": true, + "peer": true + }, + "node_modules/parse5-htmlparser2-tree-adapter": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz", + "integrity": "sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==", + "dev": true, + "peer": true, + "dependencies": { + "parse5": "^6.0.1" + } + }, + "node_modules/parse5-htmlparser2-tree-adapter/node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true, + "peer": true + }, "node_modules/passport": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/passport/-/passport-0.7.0.tgz", @@ -16434,9 +16637,9 @@ } }, "node_modules/prettier": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.4.tgz", - "integrity": "sha512-FWu1oLHKCrtpO1ypU6J0SbK2d9Ckwysq6bHj/uaCP26DxrPpppCLQRGVuqAxSTvhF00AcvDRyYrLNW7ocBhFFQ==", + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", + "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", "dev": true, "bin": { "prettier": "bin/prettier.cjs" @@ -16996,16 
+17199,6 @@ "node": ">=8" } }, - "node_modules/redeyed": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/redeyed/-/redeyed-2.1.1.tgz", - "integrity": "sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ==", - "dev": true, - "peer": true, - "dependencies": { - "esprima": "~4.0.0" - } - }, "node_modules/redis-errors": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", @@ -17032,14 +17225,15 @@ "dev": true }, "node_modules/regexp.prototype.flags": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", - "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "optional": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "set-function-name": "^2.0.0" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -17930,6 +18124,12 @@ "node": ">=0.10.0" } }, + "node_modules/rewire/node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, "node_modules/rewire/node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -18092,13 +18292,13 @@ ] }, "node_modules/safe-regex-test": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.2.tgz", - "integrity": "sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "optional": true, "dependencies": { - "call-bind": "^1.0.5", - "get-intrinsic": "^1.2.2", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" }, "engines": { @@ -18126,6 +18326,35 @@ "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" }, + "node_modules/sdk-es7": { + "name": "@elastic/elasticsearch", + "version": "7.13.0", + "resolved": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", + "integrity": "sha1-KH2HuyhOIkuGAit0KRVmPxI43So=", + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.3.1", + "hpagent": "^0.1.1", + "ms": "^2.1.3", + "secure-json-parse": "^2.4.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/sdk-es8": { + "name": "@elastic/elasticsearch", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.12.1.tgz", + "integrity": "sha512-/dJtxtvoN2vRXip6xUrEyzthhzVUOKL8L9YNq25HpMwqiqrJTK70/dOp6GM8oTVQ87UPyJBiiCxQY2+cvg2XWw==", + "dependencies": { + "@elastic/transport": "^8.4.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=18" + } + }, 
"node_modules/secure-json-parse": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", @@ -18138,9 +18367,9 @@ "dev": true }, "node_modules/semantic-release": { - "version": "23.0.0", - "resolved": "https://registry.npmjs.org/semantic-release/-/semantic-release-23.0.0.tgz", - "integrity": "sha512-Jz7jEWO2igTtske112gC4PPE2whCMVrsgxUPG3/SZI7VE357suIUZFlJd1Yu0g2I6RPc2HxNEfUg7KhmDTjwqg==", + "version": "23.0.2", + "resolved": "https://registry.npmjs.org/semantic-release/-/semantic-release-23.0.2.tgz", + "integrity": "sha512-OnVYJ6Xgzwe1x8MKswba7RU9+5djS1MWRTrTn5qsq3xZYpslroZkV9Pt0dA2YcIuieeuSZWJhn+yUWoBUHO5Fw==", "dev": true, "peer": true, "dependencies": { @@ -18162,8 +18391,8 @@ "hosted-git-info": "^7.0.0", "import-from-esm": "^1.3.1", "lodash-es": "^4.17.21", - "marked": "^11.0.0", - "marked-terminal": "^6.0.0", + "marked": "^12.0.0", + "marked-terminal": "^7.0.0", "micromatch": "^4.0.2", "p-each-series": "^3.0.0", "p-reduce": "^3.0.0", @@ -18895,13 +19124,14 @@ "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" }, "node_modules/set-function-length": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", - "integrity": "sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", + "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", "dependencies": { - "define-data-property": "^1.1.1", + "define-data-property": "^1.1.2", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.2", + "get-intrinsic": "^1.2.3", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.1" }, @@ -19014,14 +19244,18 @@ "dev": true }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz", + "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==", "optional": true, "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -19198,9 +19432,9 @@ } }, "node_modules/sinon/node_modules/diff": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz", - "integrity": "sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", "dev": true, "engines": { "node": ">=0.3.1" @@ -19300,15 +19534,15 @@ } }, "node_modules/socks": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", - "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", + "version": "2.7.3", + 
"resolved": "https://registry.npmjs.org/socks/-/socks-2.7.3.tgz", + "integrity": "sha512-vfuYK48HXCTFD03G/1/zkIls3Ebr2YNa4qU9gHDZdblHLiqhJrJGkY3+0Nx0JpN9qBhJbVObc1CNciT1bIZJxw==", "dependencies": { - "ip": "^2.0.0", + "ip-address": "^9.0.5", "smart-buffer": "^4.2.0" }, "engines": { - "node": ">= 10.13.0", + "node": ">= 10.0.0", "npm": ">= 3.0.0" } }, @@ -19435,9 +19669,9 @@ } }, "node_modules/spdx-license-ids": { - "version": "3.0.16", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.16.tgz", - "integrity": "sha512-eWN+LnM3GR6gPu35WxNgbGl8rmY1AEmoMDvL/QD6zYmPWgywxWqJWNdLGT+ke8dKNWrcYgYjPpG5gbTfghP8rw==", + "version": "3.0.17", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz", + "integrity": "sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg==", "dev": true }, "node_modules/split": { @@ -19461,10 +19695,9 @@ } }, "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==" }, "node_modules/sql-summary": { "version": "1.0.1", @@ -19497,6 +19730,12 @@ "node": ">=0.10.0" } }, + "node_modules/sshpk/node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true + }, "node_modules/ssri": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", @@ -20486,14 +20725,14 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", - "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.1.tgz", + "integrity": "sha512-RSqu1UEuSlrBhHTWC8O9FnPjOduNs4M7rJ4pRKoEjtx1zUNOPN2sSXHLDX+Y2WPbHIxbvg4JFo2DNAEfPIKWoQ==", "optional": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -20607,9 +20846,9 @@ } }, "node_modules/undici": { - "version": "5.28.2", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz", - "integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==", + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -20924,7 +21163,7 @@ "node_modules/uWebSockets.js": { "version": "20.34.0", "resolved": "https://github.com/uNetworking/uWebSockets.js/archive/refs/tags/v20.34.0.tar.gz", - "integrity": "sha512-CHw0g1nmfNi8R9KrnMGp8KVjiO2sK7N+eVPPvnYR7A1GpJ2ozTCpx/C80LbvVghaxJC+SfZw6XsehZvRPHnamg==", + "integrity": "sha1-cL3FVRzbtNmWQbSd3/uTrs8YLAI= 
sha512-CHw0g1nmfNi8R9KrnMGp8KVjiO2sK7N+eVPPvnYR7A1GpJ2ozTCpx/C80LbvVghaxJC+SfZw6XsehZvRPHnamg==", "license": "Apache-2.0" }, "node_modules/v8-compile-cache": { @@ -21085,16 +21324,16 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", - "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.14.tgz", + "integrity": "sha512-VnXFiIW8yNn9kIHN88xvZ4yOWchftKDsRJ8fEPacX/wl1lOvBrhsJ/OeJCXq7B0AaijRuqgzSKalJoPk+D8MPg==", "optional": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.4", + "available-typed-arrays": "^1.0.6", + "call-bind": "^1.0.5", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "has-tostringtag": "^1.0.1" }, "engines": { "node": ">= 0.4" diff --git a/package.json b/package.json index d59d9fa923..418480c2d7 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,8 @@ "lib": "lib" }, "dependencies": { - "@elastic/elasticsearch": "8.11.0", + "sdk-es7": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", + "sdk-es8": "npm:@elastic/elasticsearch@8.12.1", "aedes": "0.46.3", "bluebird": "3.7.2", "cli-color": "2.0.3", diff --git a/test/core/backend/BackendStorage.test.js b/test/core/backend/BackendStorage.test.js index 0978a5deec..61e455b14d 100644 --- a/test/core/backend/BackendStorage.test.js +++ b/test/core/backend/BackendStorage.test.js @@ -2,7 +2,6 @@ const should = require("should"); const mockrequire = require("mock-require"); -const { Client: ElasticsearchClient } = require("@elastic/elasticsearch"); const KuzzleMock = require("../../mocks/kuzzle.mock"); @@ -30,7 +29,6 @@ describe("Backend", () => { should(application.storage.StorageClient).be.a.Function(); const client = new application.storage.StorageClient({ maxRetries: 42 }); - should(client).be.instanceOf(ElasticsearchClient); should(client.connectionPool.connections[0].url.toString()).be.eql( "http://es:9200/", ); @@ -52,9 +50,6 @@ describe("Backend", () => { "http://es:9200"; should(application.storage._client).be.null(); - should(application.storage.storageClient).be.instanceOf( - ElasticsearchClient, - ); should( application.storage.storageClient.connectionPool.connections[0].url.toString(), ).be.eql("http://es:9200/"); diff --git a/test/core/plugin/context/context.test.js b/test/core/plugin/context/context.test.js index 1e998e2bf3..1f188d2de5 100644 --- a/test/core/plugin/context/context.test.js +++ b/test/core/plugin/context/context.test.js @@ -6,7 +6,6 @@ const mockrequire = require("mock-require"); const should = require("should"); const sinon = require("sinon"); const _ = require("lodash"); -const { Client: ESClient } = require("@elastic/elasticsearch"); const { Request, @@ -103,7 +102,7 @@ describe("Plugin Context", () => { it("should expose the ESClient constructor", () => { const storageClient = new context.constructors.ESClient(); - should(storageClient).be.instanceOf(ESClient); + should(storageClient).be.instanceOf(context.constructors.ESClient); }); it("should allow to instantiate an ESClient connected to the ES cluster", () => { @@ -127,21 +126,21 @@ describe("Plugin Context", () => { it("should replicate the right request information", () => { let request = new Request( { + _id: "_id", action: "action", + collection: "collection", controller: 
"controller", + error: new Error("error"), foobar: "foobar", - _id: "_id", index: "index", - collection: "collection", + jwt: "jwt", result: "result", - error: new Error("error"), status: 666, - jwt: "jwt", volatile: { foo: "bar" }, }, { - protocol: "protocol", connectionId: "connectionId", + protocol: "protocol", }, ), pluginRequest = new context.constructors.Request(request, {}); @@ -170,33 +169,33 @@ describe("Plugin Context", () => { it("should override origin request data with provided ones", () => { let request = new Request( { + _id: "_id", action: "action", + bar: "bar", + collection: "collection", controller: "controller", + error: new Error("error"), foo: "foo", - bar: "bar", - _id: "_id", index: "index", - collection: "collection", + jwt: "jwt", result: "result", - error: new Error("error"), status: 666, - jwt: "jwt", volatile: { foo: "bar" }, }, { - protocol: "protocol", connectionId: "connectionId", + protocol: "protocol", }, ), pluginRequest = new context.constructors.Request(request, { action: "pluginAction", + collection: "pluginCollection", controller: "pluginController", foo: false, from: 0, - size: 99, - collection: "pluginCollection", jwt: null, - volatile: { foo: "overridden", bar: "baz" }, + size: 99, + volatile: { bar: "baz", foo: "overridden" }, }); should(pluginRequest.context.protocol).be.eql("protocol"); @@ -215,15 +214,15 @@ describe("Plugin Context", () => { should(pluginRequest.input.args.index).be.eql("index"); should(pluginRequest.input.args.collection).be.eql("pluginCollection"); should(pluginRequest.input.volatile).match({ - foo: "overridden", bar: "baz", + foo: "overridden", }); }); it("should allow building a request without providing another one", () => { const rq = new context.constructors.Request({ - controller: "foo", action: "bar", + controller: "foo", }); should(rq).be.instanceOf(KuzzleRequest); @@ -234,20 +233,20 @@ describe("Plugin Context", () => { it("should expose all error objects as capitalized constructors", () => { const errors = { + BadRequestError, + ExternalServiceError, + ForbiddenError, + GatewayTimeoutError, + InternalError, KuzzleError, - UnauthorizedError, - TooManyRequestsError, - SizeLimitError, - ServiceUnavailableError, - PreconditionError, - PluginImplementationError, - PartialError, NotFoundError, - InternalError, - GatewayTimeoutError, - ForbiddenError, - ExternalServiceError, - BadRequestError, + PartialError, + PluginImplementationError, + PreconditionError, + ServiceUnavailableError, + SizeLimitError, + TooManyRequestsError, + UnauthorizedError, }; should(context.errors).be.an.Object().and.not.be.empty(); @@ -259,7 +258,7 @@ describe("Plugin Context", () => { }); it("should expose the right accessors", () => { - ["verbose", "info", "debug", "warn", "error"].forEach((level) => { + for (const level of ["verbose", "info", "debug", "warn", "error"]) { should(context.log[level]).be.an.instanceOf(Function); context.log[level]("test"); @@ -267,7 +266,7 @@ describe("Plugin Context", () => { should(kuzzle.log[level]) .calledOnce() .calledWithExactly("[pluginName] test"); - }); + } should(context.accessors).be.an.Object().and.not.be.empty(); should(context.accessors).have.properties([ @@ -415,12 +414,12 @@ describe("Plugin Context", () => { kuzzle.pipe.resolves("pipe chain result"); const eventName = "backHome"; const payload = { - question: "whose motorcycle is this?", - answer: "it's a chopper, baby.", - anotherQuestion: "whose chopper is this, then?", anotherAnswer: "it's Zed's", - yetAnotherQuestion: "who's Zed?", + 
anotherQuestion: "whose chopper is this, then?", + answer: "it's a chopper, baby.", + question: "whose motorcycle is this?", yetAnotherAnswer: "Zed's dead, baby, Zed's dead.", + yetAnotherQuestion: "who's Zed?", }; const result = await context.accessors.trigger(eventName, payload); @@ -556,8 +555,8 @@ describe("Plugin Context", () => { return should( context.accessors.execute( new Request({ - controller: "realtime", action: "subscribe", + controller: "realtime", }), ), ).be.rejectedWith(PluginImplementationError, { @@ -568,8 +567,8 @@ describe("Plugin Context", () => { return should( context.accessors.execute( new Request({ - controller: "realtime", action: "unsubscribe", + controller: "realtime", }), ), ).be.rejectedWith(PluginImplementationError, { @@ -597,8 +596,8 @@ describe("Plugin Context", () => { mockedStrategy, ); should(kuzzle.pipe).calledWith("core:auth:strategyAdded", { - pluginName: "pluginName", name: "foo", + pluginName: "pluginName", strategy: mockedStrategy, }); }); @@ -639,8 +638,8 @@ describe("Plugin Context", () => { "foo", ); should(kuzzle.pipe).calledWith("core:auth:strategyRemoved", { - pluginName: "pluginName", name: "foo", + pluginName: "pluginName", }); }); }); diff --git a/tsconfig.json b/tsconfig.json index cb6307afb4..fd76dcd2ec 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,7 +15,8 @@ ] }, "esModuleInterop": true, - "resolveJsonModule": true + "resolveJsonModule": true, + "skipDefaultLibCheck": true }, "rootDir": "lib/", "include": [ From 0fac6fec30d8bf160ab9056484562ed4a7d396c0 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 14 Feb 2024 09:12:04 +0100 Subject: [PATCH 12/59] docs(documentation): add a new panel to talk about ES8 --- .kuzzlerc.sample.jsonc | 2 +- doc/2/guides/elasticsearch/index.md | 14 +++ doc/2/guides/elasticsearch/v8/index.md | 118 +++++++++++++++++++++++++ 3 files changed, 133 insertions(+), 1 deletion(-) create mode 100644 doc/2/guides/elasticsearch/index.md create mode 100644 doc/2/guides/elasticsearch/v8/index.md diff --git a/.kuzzlerc.sample.jsonc b/.kuzzlerc.sample.jsonc index 9f3508e380..4fabf09c2b 100644 --- a/.kuzzlerc.sample.jsonc +++ b/.kuzzlerc.sample.jsonc @@ -787,7 +787,7 @@ // - "strict": Rejects document // See https://www.elastic.co/guide/en/elasticsearch/reference/7.4/dynamic-mapping.html "storageEngine": { - "majorVersion": "8", + "majorVersion": "7", "backend": "elasticsearch", "aliases": [ "storageEngine" diff --git a/doc/2/guides/elasticsearch/index.md b/doc/2/guides/elasticsearch/index.md new file mode 100644 index 0000000000..b034a88227 --- /dev/null +++ b/doc/2/guides/elasticsearch/index.md @@ -0,0 +1,14 @@ +--- +code: false +type: branch +order: 400 +title: Develop on Kuzzle | Guide | Core +meta: + - name: description + content: Write custom backend code for Kuzzle + - name: keywords + content: Kuzzle, Documentation, kuzzle write pluggins, General purpose backend, opensource, Develop on Kuzzle +--- + + + diff --git a/doc/2/guides/elasticsearch/v8/index.md b/doc/2/guides/elasticsearch/v8/index.md new file mode 100644 index 0000000000..1f505d950e --- /dev/null +++ b/doc/2/guides/elasticsearch/v8/index.md @@ -0,0 +1,118 @@ +--- +code: false +type: page +order: 100 +title: Elasticsearch 8 | Develop on Kuzzle | Guide | Core +meta: + - name: description + content: Extend Kuzzle API with controllers and actions + - name: keywords + content: Kuzzle, Documentation, kuzzle write pluggins, General purpose backend, iot, backend, opensource, API Controllers +--- + +# Elasticsearch 8 + + + +Kuzzle uses Elasticsearch as a 
[NoSQL document store](/core/2/guides/main-concepts/data-storage).
+
+With Kuzzle, customers **can directly access data stored in the database** as long as they have the rights to do so.
+
+Kuzzle exposes the [Elasticsearch Query Language](/core/2/guides/main-concepts/querying) in a secure way. It is therefore possible to **take full advantage of the possibilities of Elasticsearch** with boolean queries, aggregations, special fields, etc.
+
+Support for Elasticsearch 8 has been introduced in Kuzzle 2.30.0.
+
+We chose to keep Kuzzle backward compatible: adding support for Elasticsearch 8 does not introduce any breaking change.
+
+Elasticsearch 8 support is opt-in, so no modification is needed on your behalf to keep using Elasticsearch 7.
+
+By default, the supported `majorVersion` will remain 7 until Kuzzle v3.
+
+The new key used to change the supported version is available under `services.storageEngine.majorVersion`:
+
+```json
+{
+  "services": {
+    "storageEngine": {
+      "majorVersion": 8
+    }
+  }
+}
+```
+
+:::warning
+You cannot set the `majorVersion` to 8 if you are using a version of Kuzzle that does not support it.
+:::
+
+:::info
+Kuzzle cannot connect to both ES7 and ES8 at the same time.
+:::
+
+Once the version is set to 8, Kuzzle will use the Elasticsearch 8 API to communicate with the database.
+
+You will find below an example of a `docker-compose.yml` file to run Kuzzle with Elasticsearch 8.
+
+```yaml
+version: '3.8'
+
+services:
+  node:
+    image: kuzzleio/kuzzle:2
+    depends_on:
+      redis:
+        condition: service_healthy
+      elasticsearch:
+        condition: service_healthy
+    ports:
+      - "7512:7512"
+      - "7511:7511"
+      - "7510:7510"
+      - "9229:9229"
+      - "1883:1883"
+    environment:
+      - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200
+      - kuzzle_services__storageEngine__commonMapping__dynamic=true
+      - kuzzle_services__internalCache__node__host=redis
+      - kuzzle_services__memoryStorage__node__host=redis
+      - NODE_ENV=${NODE_ENV:-development}
+      - DEBUG=${DEBUG:-none}
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:7512/_healthcheck"]
+      timeout: 10s
+      interval: 10s
+      retries: 30
+      start_period: 1m
+
+  redis:
+    image: redis:6
+    ports:
+      - '6379:6379'
+    healthcheck:
+      test: ['CMD', 'redis-cli', 'ping']
+      interval: 1s
+      timeout: 3s
+      retries: 30
+
+  elasticsearch:
+    image: elasticsearch:8.11.3
+    container_name: kuzzle_elasticsearch
+    environment:
+      - xpack.security.enabled=false
+      - action.destructive_requires_name=false
+      - cluster.name=kuzzle
+      - node.name=alyx
+      - discovery.type=single-node
+      - ingest.geoip.downloader.enabled=false
+      - indices.id_field_data.enabled=true
+    ports:
+      - '9200:9200'
+    healthcheck:
+      test: ['CMD', 'curl', '-f', 'http://localhost:9200']
+      interval: 2s
+      timeout: 2s
+      retries: 10
+    ulimits:
+      nofile: 65536
+```
+
+Or you can run `kourou app:scaffold sandbox` to create a new Kuzzle project with a `docker-compose.yml` file that uses Elasticsearch 8.
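+
+As a minimal sketch, assuming the `kuzzle_` / `__` environment variable mapping already used for `kuzzle_services__storageEngine__client__node` in the `docker-compose.yml` above also covers the `services.storageEngine.majorVersion` key (an assumption, not something verified here), the Elasticsearch version could alternatively be selected from the node's environment instead of a configuration file:
+
+```yaml
+# Hypothetical excerpt of the "node" service environment shown above.
+# Assumes the kuzzle_* / "__" variable mapping applies to majorVersion as well.
+environment:
+  - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200
+  - kuzzle_services__storageEngine__majorVersion=8
+```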
\ No newline at end of file From 312536f5715e252403265d9366219fa9755ae758 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 14 Feb 2024 10:11:29 +0100 Subject: [PATCH 13/59] test(lint): fix lint, modifying the command --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 418480c2d7..2b413bc95f 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "dev": "ergol docker/scripts/start-kuzzle-dev.ts -c ./config/ergol.config.json", "doc-error-codes": "node -r ts-node/register doc/build-error-codes", "prepublishOnly": "npm run build", - "prettier": "prettier ./lib ./test ./bin ./features ./plugins/available/functional-test-plugin --write", + "prettier": "prettier ./lib ./test ./features ./plugins/available/functional-test-plugin --write", "test:functional:http": "KUZZLE_PROTOCOL=http cucumber-js --profile http", "test:functional:jest": "jest", "test:functional:legacy:http": "cucumber-js --format progress-bar --profile http ./features-legacy", @@ -21,7 +21,7 @@ "test:functional:legacy": "npm run test:functional:legacy:http && npm run test:functional:legacy:websocket && npm run test:functional:legacy:mqtt", "test:functional:websocket": "KUZZLE_PROTOCOL=websocket cucumber-js --profile websocket", "test:functional": "npm run test:functional:http && npm run test:functional:websocket && npm run test:functional:jest", - "test:lint:js": "eslint ./lib ./test ./bin ./features ./plugins/available/functional-test-plugin", + "test:lint:js": "eslint ./lib ./test ./features ./plugins/available/functional-test-plugin", "test:lint:ts": "eslint ./lib --ext .ts --config .eslintc-ts.json", "test:lint": "npm run test:lint:js && npm run test:lint:ts", "test:unit": "DEBUG= npx --node-arg=--trace-warnings mocha --exit", From de154ba25b200d5379290de86c65fbc4a9b07da4 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 14 Feb 2024 14:59:25 +0100 Subject: [PATCH 14/59] chore(gitignore): modify the gitignore and eslintignore files --- .eslintignore | 5 +- .gitignore | 5 +- lib/service/storage/7/elasticsearch.js | 2926 ------------------------ lib/service/storage/8/elasticsearch.js | 2922 ----------------------- lib/types/storage/7/Elasticsearch.js | 3 - lib/types/storage/8/Elasticsearch.js | 3 - 6 files changed, 8 insertions(+), 5856 deletions(-) delete mode 100644 lib/service/storage/7/elasticsearch.js delete mode 100644 lib/service/storage/8/elasticsearch.js delete mode 100644 lib/types/storage/7/Elasticsearch.js delete mode 100644 lib/types/storage/8/Elasticsearch.js diff --git a/.eslintignore b/.eslintignore index 4fed7e5c36..7a28cbcd43 100644 --- a/.eslintignore +++ b/.eslintignore @@ -58,7 +58,8 @@ lib/types/Token.js lib/types/User.js lib/types/Global.d.ts lib/types/Global.js -lib/types/storage/Elasticsearch.js +lib/types/storage/7/Elasticsearch.js +lib/types/storage/8/Elasticsearch.js lib/util/interfaces.js lib/util/mutex.js lib/util/Inflector.js @@ -127,6 +128,8 @@ lib/api/controllers/debugController.js lib/api/openapi/components/security/index.js lib/config/documentEventAliases.js lib/service/storage/Elasticsearch.js +lib/service/storage/7/elasticsearch.js +lib/service/storage/8/elasticsearch.js lib/types/DebugModule.js lib/util/time.js lib/util/async.js diff --git a/.gitignore b/.gitignore index 56b5bcb271..043c764104 100644 --- a/.gitignore +++ b/.gitignore @@ -163,6 +163,8 @@ lib/model/security/role.js lib/model/security/token.js lib/model/security/user.js lib/service/storage/Elasticsearch.js +lib/service/storage/7/elasticsearch.js 
+lib/service/storage/8/elasticsearch.js lib/types/ClientConnection.js lib/types/config/DumpConfiguration.js lib/types/config/HttpConfiguration.js @@ -208,7 +210,8 @@ lib/types/realtime/RoomList.js lib/types/RequestPayload.js lib/types/ResponsePayload.js lib/types/RoleDefinition.js -lib/types/storage/Elasticsearch.js +lib/types/storage/7/Elasticsearch.js +lib/types/storage/8/Elasticsearch.js lib/types/StrategyDefinition.js lib/types/Target.js lib/types/Token.js diff --git a/lib/service/storage/7/elasticsearch.js b/lib/service/storage/7/elasticsearch.js deleted file mode 100644 index f39999be83..0000000000 --- a/lib/service/storage/7/elasticsearch.js +++ /dev/null @@ -1,2926 +0,0 @@ -"use strict"; -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ES7 = void 0; -const lodash_1 = __importDefault(require("lodash")); -const sdk_es7_1 = require("sdk-es7"); -const assert_1 = __importDefault(require("assert")); -const ms_1 = __importDefault(require("ms")); -const bluebird_1 = __importDefault(require("bluebird")); -const semver_1 = __importDefault(require("semver")); -const debug_1 = __importDefault(require("../../../util/debug")); -const esWrapper_1 = __importDefault(require("./esWrapper")); -const queryTranslator_1 = __importDefault(require("../commons/queryTranslator")); -const didYouMean_1 = __importDefault(require("../../../util/didYouMean")); -const kerror = __importStar(require("../../../kerror")); -const requestAssertions_1 = require("../../../util/requestAssertions"); -const safeObject_1 = require("../../../util/safeObject"); -const storeScopeEnum_1 = __importDefault(require("../../../core/storage/storeScopeEnum")); -const extractFields_1 = __importDefault(require("../../../util/extractFields")); -const mutex_1 = require("../../../util/mutex"); -const name_generator_1 = require("../../../util/name-generator"); -(0, debug_1.default)("kuzzle:services:elasticsearch"); -const SCROLL_CACHE_PREFIX = "_docscroll_"; -const ROOT_MAPPING_PROPERTIES = [ - "properties", - "_meta", - "dynamic", - "dynamic_templates", -]; -const CHILD_MAPPING_PROPERTIES = ["type"]; -// Used for collection emulation -const HIDDEN_COLLECTION = "_kuzzle_keep"; -const ALIAS_PREFIX = "@"; // @todo next major release: Add ALIAS_PREFIX in FORBIDDEN_CHARS -const PRIVATE_PREFIX = "%"; -const PUBLIC_PREFIX = "&"; -const INDEX_PREFIX_POSITION_IN_INDICE = 0; -const INDEX_PREFIX_POSITION_IN_ALIAS = 1; -const NAME_SEPARATOR = "."; -const FORBIDDEN_CHARS = `\\/*?"<>| \t\r\n,+#:${NAME_SEPARATOR}${PUBLIC_PREFIX}${PRIVATE_PREFIX}`; -const DYNAMIC_PROPERTY_VALUES = ["true", "false", "strict"]; -// used to check whether we need to wait for ES to initialize or not -var esStateEnum; -(function (esStateEnum) { - esStateEnum[esStateEnum["AWAITING"] = 1] = "AWAITING"; - esStateEnum[esStateEnum["NONE"] = 2] = "NONE"; - esStateEnum[esStateEnum["OK"] = 3] = "OK"; -})(esStateEnum || (esStateEnum = {})); -let esState = esStateEnum.NONE; -/** - * @param {Kuzzle} kuzzle kuzzle instance - * @param {Object} config Service configuration - * @param {storeScopeEnum} scope - * @constructor - */ -class ES7 { - constructor(config, scope = storeScopeEnum_1.default.PUBLIC) { - this._config = config; - this._scope = scope; - this._indexPrefix = - scope === storeScopeEnum_1.default.PRIVATE ? 
PRIVATE_PREFIX : PUBLIC_PREFIX; - this._client = null; - this._esWrapper = null; - this._esVersion = null; - this._translator = new queryTranslator_1.default(); - // Allowed root key of a search query - this.searchBodyKeys = [ - "aggregations", - "aggs", - "collapse", - "explain", - "fields", - "from", - "highlight", - "query", - "search_after", - "search_timeout", - "size", - "sort", - "suggest", - "_name", - "_source", - "_source_excludes", - "_source_includes", - ]; - /** - * Only allow stored-scripts in queries - */ - this.scriptKeys = ["script", "_script"]; - this.scriptAllowedArgs = ["id", "params"]; - this.maxScrollDuration = this._loadMsConfig("maxScrollDuration"); - this.scrollTTL = this._loadMsConfig("defaults.scrollTTL"); - } - get scope() { - return this._scope; - } - /** - * Initializes the elasticsearch client - * - * @override - * @returns {Promise} - */ - async _initSequence() { - if (this._client) { - return; - } - if (global.NODE_ENV !== "development" && - this._config.commonMapping.dynamic === "true") { - global.kuzzle.log.warn([ - "Your dynamic mapping policy is set to 'true' for new fields.", - "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", - 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n")); - } - this._client = new sdk_es7_1.Client(this._config.client); - await this.waitForElasticsearch(); - this._esWrapper = new esWrapper_1.default(this._client); - const { body: { version }, } = await this._client.info(); - if (version && - !semver_1.default.satisfies(semver_1.default.coerce(version.number), ">= 7.0.0")) { - throw kerror.get("services", "storage", "version_mismatch", version.number); - } - this._esVersion = version; - } - /** - * Translate Koncorde filters to Elasticsearch query - * - * @param {Object} filters - Set of valid Koncorde filters - * @returns {Object} Equivalent Elasticsearch query - */ - translateKoncordeFilters(filters) { - return this._translator.translate(filters); - } - /** - * Returns some basic information about this service - * @override - * - * @returns {Promise.} service informations - */ - info() { - const result = { - type: "elasticsearch", - version: this._esVersion, - }; - return this._client - .info() - .then(({ body }) => { - result.version = body.version.number; - result.lucene = body.version.lucene_version; - return this._client.cluster.health(); - }) - .then(({ body }) => { - result.status = body.status; - return this._client.cluster.stats({ human: true }); - }) - .then(({ body }) => { - result.spaceUsed = body.indices.store.size; - result.nodes = body.nodes; - return result; - }) - .catch((error) => this._esWrapper.reject(error)); - } - /** - * Returns detailed multi-level storage stats data - * - * @returns {Promise.} - */ - async stats() { - const esRequest = { - metric: ["docs", "store"], - }; - const { body } = await this._client.indices.stats(esRequest); - const indexes = {}; - let size = 0; - for (const [indice, indiceInfo] of Object.entries(body.indices)) { - const infos = indiceInfo; - // Ignore non-Kuzzle indices - if (!indice.startsWith(PRIVATE_PREFIX) && - !indice.startsWith(PUBLIC_PREFIX)) { - continue; - } - const aliases = await this._getAliasFromIndice(indice); - const alias = aliases[0]; - const indexName = this._extractIndex(alias); - const collectionName = this._extractCollection(alias); - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || - 
collectionName === HIDDEN_COLLECTION) { - continue; - } - if (!indexes[indexName]) { - indexes[indexName] = { - collections: [], - name: indexName, - size: 0, - }; - } - indexes[indexName].collections.push({ - documentCount: infos.total.docs.count, - name: collectionName, - size: infos.total.store.size_in_bytes, - }); - indexes[indexName].size += infos.total.store.size_in_bytes; - size += infos.total.store.size_in_bytes; - } - return { - indexes: Object.values(indexes), - size, - }; - } - /** - * Scrolls results from previous elasticsearch query. - * Automatically clears the scroll context after the last result page has - * been fetched. - * - * @param {String} scrollId - Scroll identifier - * @param {Object} options - scrollTTL (default scrollTTL) - * - * @returns {Promise.<{ scrollId, hits, aggregations, total }>} - */ - async scroll(scrollId, { scrollTTL } = {}) { - const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; - const esRequest = { - scroll: _scrollTTL, - scroll_id: scrollId, - }; - const cacheKey = SCROLL_CACHE_PREFIX + global.kuzzle.hash(esRequest.scroll_id); - (0, debug_1.default)("Scroll: %o", esRequest); - if (_scrollTTL) { - const scrollDuration = (0, ms_1.default)(_scrollTTL); - if (scrollDuration > this.maxScrollDuration) { - throw kerror.get("services", "storage", "scroll_duration_too_great", _scrollTTL); - } - } - const stringifiedScrollInfo = await global.kuzzle.ask("core:cache:internal:get", cacheKey); - if (!stringifiedScrollInfo) { - throw kerror.get("services", "storage", "unknown_scroll_id"); - } - const scrollInfo = JSON.parse(stringifiedScrollInfo); - try { - const { body } = await this._client.scroll(esRequest); - scrollInfo.fetched += body.hits.hits.length; - if (scrollInfo.fetched >= body.hits.total.value) { - (0, debug_1.default)("Last scroll page fetched: deleting scroll %s", body._scroll_id); - await global.kuzzle.ask("core:cache:internal:del", cacheKey); - await this.clearScroll(body._scroll_id); - } - else { - await global.kuzzle.ask("core:cache:internal:store", cacheKey, JSON.stringify(scrollInfo), { - ttl: (0, ms_1.default)(_scrollTTL) || this.scrollTTL, - }); - } - body.remaining = body.hits.total.value - scrollInfo.fetched; - return await this._formatSearchResult(body, scrollInfo); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Searches documents from elasticsearch with a query - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} searchBody - Search request body (query, sort, etc.) 
- * @param {Object} options - from (undefined), size (undefined), scroll (undefined) - * - * @returns {Promise.<{ scrollId, hits, aggregations, suggest, total }>} - */ - async search({ index, collection, searchBody, targets, } = {}, { from, size, scroll, } = {}) { - let esIndexes; - if (targets && targets.length > 0) { - const indexes = new Set(); - for (const target of targets) { - for (const targetCollection of target.collections) { - const alias = this._getAlias(target.index, targetCollection); - indexes.add(alias); - } - } - esIndexes = Array.from(indexes).join(","); - } - else { - esIndexes = this._getAlias(index, collection); - } - const esRequest = { - body: this._sanitizeSearchBody(searchBody), - from, - index: esIndexes, - scroll, - size, - trackTotalHits: true, - }; - if (scroll) { - const scrollDuration = (0, ms_1.default)(scroll); - if (scrollDuration > this.maxScrollDuration) { - throw kerror.get("services", "storage", "scroll_duration_too_great", scroll); - } - } - (0, debug_1.default)("Search: %j", esRequest); - try { - const { body } = await this._client.search(esRequest); - if (body._scroll_id) { - const ttl = (esRequest.scroll && (0, ms_1.default)(esRequest.scroll)) || - (0, ms_1.default)(this._config.defaults.scrollTTL); - await global.kuzzle.ask("core:cache:internal:store", SCROLL_CACHE_PREFIX + global.kuzzle.hash(body._scroll_id), JSON.stringify({ - collection, - fetched: body.hits.hits.length, - index, - targets, - }), { ttl }); - body.remaining = body.hits.total.value - body.hits.hits.length; - } - return await this._formatSearchResult(body, { - collection, - index, - targets, - }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Generate a map that associate an alias to a pair of index and collection - * - * @param {*} targets - * @returns - */ - _mapTargetsToAlias(targets) { - const aliasToTargets = {}; - for (const target of targets) { - for (const targetCollection of target.collections) { - const alias = this._getAlias(target.index, targetCollection); - if (!aliasToTargets[alias]) { - aliasToTargets[alias] = { - collection: targetCollection, - index: target.index, - }; - } - } - } - return aliasToTargets; - } - async _formatSearchResult(body, searchInfo = {}) { - let aliasToTargets = {}; - const aliasCache = new Map(); - if (searchInfo.targets) { - /** - * We need to map the alias to the target index and collection, - * so we can later retrieve informations about an index & collection - * based on its alias. - */ - aliasToTargets = this._mapTargetsToAlias(searchInfo.targets); - } - const formatHit = async (hit) => { - let index = searchInfo.index; - let collection = searchInfo.collection; - /** - * If the search has been done on multiple targets, we need to - * retrieve the appropriate index and collection based on the alias - */ - if (hit._index && searchInfo.targets) { - // Caching to reduce call to ES - let aliases = aliasCache.get(hit._index); - if (!aliases) { - // Retrieve all the alias associated to one index - aliases = await this._getAliasFromIndice(hit._index); - aliasCache.set(hit._index, aliases); - } - /** - * Since multiple alias can point to the same index in ES, we need to - * find the first alias that exists in the map of aliases associated - * to the targets. 
- */ - const alias = aliases.find((_alias) => aliasToTargets[_alias]); - // Retrieve index and collection information based on the matching alias - index = aliasToTargets[alias].index; - collection = aliasToTargets[alias].collection; - } - return { - _id: hit._id, - _score: hit._score, - _source: hit._source, - collection, - highlight: hit.highlight, - index, - }; - }; - async function formatInnerHits(innerHits) { - if (!innerHits) { - return undefined; - } - const formattedInnerHits = {}; - for (const [name, innerHit] of Object.entries(innerHits)) { - formattedInnerHits[name] = await bluebird_1.default.map(innerHit.hits.hits, formatHit); - } - return formattedInnerHits; - } - const hits = await bluebird_1.default.map(body.hits.hits, async (hit) => ({ - inner_hits: await formatInnerHits(hit.inner_hits), - ...(await formatHit(hit)), - })); - return { - aggregations: body.aggregations, - hits, - remaining: body.remaining, - scrollId: body._scroll_id, - suggest: body.suggest, - total: body.hits.total.value, - }; - } - /** - * Gets the document with given ID - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document ID - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async get(index, collection, id) { - const esRequest = { - id, - index: this._getAlias(index, collection), - }; - // Just in case the user make a GET on url /mainindex/test/_search - // Without this test we return something weird: a result.hits.hits with all - // document without filter because the body is empty in HTTP by default - if (esRequest.id === "_search") { - return kerror.reject("services", "storage", "search_as_an_id"); - } - (0, debug_1.default)("Get document: %o", esRequest); - try { - const { body } = await this._client.get(esRequest); - return { - _id: body._id, - _source: body._source, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Returns the list of documents matching the ids given in the body param - * NB: Due to internal Kuzzle mechanism, can only be called on a single - * index/collection, using the body { ids: [.. } syntax. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Document IDs - * - * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} - */ - async mGet(index, collection, ids) { - if (ids.length === 0) { - return { errors: [], item: [] }; - } - const esRequest = { - body: { - docs: ids.map((_id) => ({ - _id, - _index: this._getAlias(index, collection), - })), - }, - }; - (0, debug_1.default)("Multi-get documents: %o", esRequest); - let body; - try { - ({ body } = await this._client.mget(esRequest)); // NOSONAR - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - const errors = []; - const items = []; - for (const doc of body.docs) { - if (doc.found) { - items.push({ - _id: doc._id, - _source: doc._source, - _version: doc._version, - }); - } - else { - errors.push(doc._id); - } - } - return { errors, items }; - } - /** - * Counts how many documents match the filter given in body - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} searchBody - Search request body (query, sort, etc.) 
- * - * @returns {Promise.} count - */ - async count(index, collection, searchBody = {}) { - const esRequest = { - body: this._sanitizeSearchBody(searchBody), - index: this._getAlias(index, collection), - }; - (0, debug_1.default)("Count: %o", esRequest); - try { - const { body } = await this._client.count(esRequest); - return body.count; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends the new document to elasticsearch - * Cleans data to match elasticsearch specifications - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} content - Document content - * @param {Object} options - id (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { _id, _version, _source } - */ - async create(index, collection, content, { id, refresh, userId = null, injectKuzzleMeta = true, } = {}) { - (0, requestAssertions_1.assertIsObject)(content); - const esRequest = { - body: content, - id, - index: this._getAlias(index, collection), - op_type: id ? "create" : "index", - refresh, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - // Add metadata - if (injectKuzzleMeta) { - esRequest.body._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }; - } - (0, debug_1.default)("Create document: %o", esRequest); - try { - const { body } = await this._client.index(esRequest); - return { - _id: body._id, - _source: esRequest.body, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Creates a new document to Elasticsearch, or replace it if it already exist - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Document content - * @param {Object} options - refresh (undefined), userId (null), injectKuzzleMeta (true) - * - * @returns {Promise.} { _id, _version, _source, created } - */ - async createOrReplace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { - const esRequest = { - body: content, - id, - index: this._getAlias(index, collection), - refresh, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - // Add metadata - if (injectKuzzleMeta) { - esRequest.body._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - (0, debug_1.default)("Create or replace document: %o", esRequest); - try { - const { body } = await this._client.index(esRequest); - return { - _id: body._id, - _source: esRequest.body, - _version: body._version, - created: body.result === "created", // Needed by the notifier - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends the partial document to elasticsearch with the id to update - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Updated content - * @param {Object} options - refresh (undefined), userId (null), retryOnConflict (0) - * - * @returns {Promise.<{ _id, _version }>} - */ - async update(index, collection, id, content, { refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { - const esRequest = { - _source: "true", - body: { doc: content }, - id, - index: this._getAlias(index, collection), - refresh, - retry_on_conflict: 
retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - if (injectKuzzleMeta) { - // Add metadata - esRequest.body.doc._kuzzle_info = { - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - (0, debug_1.default)("Update document: %o", esRequest); - try { - const { body } = await this._client.update(esRequest); - return { - _id: body._id, - _source: body.get._source, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends the partial document to elasticsearch with the id to update - * Creates the document if it doesn't already exist - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Updated content - * @param {Object} options - defaultValues ({}), refresh (undefined), userId (null), retryOnConflict (0) - * - * @returns {Promise.<{ _id, _version }>} - */ - async upsert(index, collection, id, content, { defaultValues = {}, refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { - const esRequest = { - _source: "true", - body: { - doc: content, - upsert: { ...defaultValues, ...content }, - }, - id, - index: this._getAlias(index, collection), - refresh, - retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - // Add metadata - const user = getKuid(userId); - const now = Date.now(); - if (injectKuzzleMeta) { - esRequest.body.doc._kuzzle_info = { - updatedAt: now, - updater: user, - }; - esRequest.body.upsert._kuzzle_info = { - author: user, - createdAt: now, - }; - } - (0, debug_1.default)("Upsert document: %o", esRequest); - try { - const { body } = await this._client.update(esRequest); - return { - _id: body._id, - _source: body.get._source, - _version: body._version, - created: body.result === "created", - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Replaces a document to Elasticsearch - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Document content - * @param {Object} options - refresh (undefined), userId (null) - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async replace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { - const alias = this._getAlias(index, collection); - const esRequest = { - body: content, - id, - index: alias, - refresh, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - if (injectKuzzleMeta) { - // Add metadata - esRequest.body._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - try { - const { body: exists } = await this._client.exists({ id, index: alias }); - if (!exists) { - throw kerror.get("services", "storage", "not_found", id, index, collection); - } - (0, debug_1.default)("Replace document: %o", esRequest); - const { body } = await this._client.index(esRequest); - return { - _id: id, - _source: esRequest.body, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends to elasticsearch the document id to delete - * - * @param {String} index - Index name - * @param {String} collection - Collection 
name - * @param {String} id - Document id - * @param {Object} options - refresh (undefined) - * - * @returns {Promise} - */ - async delete(index, collection, id, { refresh, } = {}) { - const esRequest = { - id, - index: this._getAlias(index, collection), - refresh, - }; - assertWellFormedRefresh(esRequest); - (0, debug_1.default)("Delete document: %o", esRequest); - try { - await this._client.delete(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - return null; - } - /** - * Deletes all documents matching the provided filters. - * If fetch=false, the max documents write limit is not applied. - * - * Options: - * - size: size of the batch to retrieve documents (no-op if fetch=false) - * - refresh: refresh option for ES - * - fetch: if true, will fetch the documents before delete them - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} options - size (undefined), refresh (undefined), fetch (true) - * - * @returns {Promise.<{ documents, total, deleted, failures: Array<{ _shardId, reason }> }>} - */ - async deleteByQuery(index, collection, query, { refresh, size = 1000, fetch = true, } = {}) { - const esRequest = { - body: this._sanitizeSearchBody({ query }), - index: this._getAlias(index, collection), - scroll: "5s", - size, - }; - if (!(0, safeObject_1.isPlainObject)(query)) { - throw kerror.get("services", "storage", "missing_argument", "body.query"); - } - try { - let documents = []; - if (fetch) { - documents = await this._getAllDocumentsFromQuery(esRequest); - } - (0, debug_1.default)("Delete by query: %o", esRequest); - esRequest.refresh = refresh === "wait_for" ? true : refresh; - const { body } = await this._client.deleteByQuery(esRequest); - return { - deleted: body.deleted, - documents, - failures: body.failures.map(({ shardId, reason }) => ({ - reason, - shardId, - })), - total: body.total, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Delete fields of a document and replace it - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Array} fields - Document fields to be removed - * @param {Object} options - refresh (undefined), userId (null) - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async deleteFields(index, collection, id, fields, { refresh, userId = null, } = {}) { - const alias = this._getAlias(index, collection); - const esRequest = { - id, - index: alias, - }; - try { - (0, debug_1.default)("DeleteFields document: %o", esRequest); - const { body } = await this._client.get(esRequest); - for (const field of fields) { - if (lodash_1.default.has(body._source, field)) { - lodash_1.default.set(body._source, field, undefined); - } - } - body._source._kuzzle_info = { - ...body._source._kuzzle_info, - updatedAt: Date.now(), - updater: getKuid(userId), - }; - const newEsRequest = { - body: body._source, - id, - index: alias, - refresh, - }; - assertNoRouting(newEsRequest); - assertWellFormedRefresh(newEsRequest); - const { body: updated } = await this._client.index(newEsRequest); - return { - _id: id, - _source: body._source, - _version: updated._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Updates all documents matching the provided filters - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * 
@param {Object} query - Query to match documents - * @param {Object} changes - Changes wanted on documents - * @param {Object} options - refresh (undefined), size (undefined) - * - * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} - */ - async updateByQuery(index, collection, query, changes, { refresh, size = 1000, userId = null, } = {}) { - try { - const esRequest = { - body: this._sanitizeSearchBody({ query }), - index: this._getAlias(index, collection), - scroll: "5s", - size, - }; - const documents = await this._getAllDocumentsFromQuery(esRequest); - for (const document of documents) { - document._source = undefined; - document.body = changes; - } - (0, debug_1.default)("Update by query: %o", esRequest); - const { errors, items } = await this.mUpdate(index, collection, documents, { refresh, userId }); - return { - errors, - successes: items, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Updates all documents matching the provided filters - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} changes - Changes wanted on documents - * @param {Object} options - refresh (undefined) - * - * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} - */ - async bulkUpdateByQuery(index, collection, query, changes, { refresh = false, } = {}) { - const script = { - params: {}, - source: "", - }; - const flatChanges = (0, extractFields_1.default)(changes, { alsoExtractValues: true }); - for (const { key, value } of flatChanges) { - script.source += `ctx._source.${key} = params['${key}'];`; - script.params[key] = value; - } - const esRequest = { - body: { - query: this._sanitizeSearchBody({ query }).query, - script, - }, - index: this._getAlias(index, collection), - refresh, - }; - (0, debug_1.default)("Bulk Update by query: %o", esRequest); - let response; - try { - response = await this._client.updateByQuery(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - if (response.body.failures.length) { - const errors = response.body.failures.map(({ shardId, reason }) => ({ - reason, - shardId, - })); - throw kerror.get("services", "storage", "incomplete_update", response.body.updated, errors); - } - return { - updated: response.body.updated, - }; - } - /** - * Execute the callback with a batch of documents of specified size until all - * documents matched by the query have been processed. 
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Function} callback - callback that will be called with the "hits" array - * @param {Object} options - size (10), scrollTTL ('5s') - * - * @returns {Promise.} Array of results returned by the callback - */ - async mExecute(index, collection, query, callback, { size = 10, scrollTTl = "5s", } = {}) { - const esRequest = { - body: this._sanitizeSearchBody({ query }), - from: 0, - index: this._getAlias(index, collection), - scroll: scrollTTl, - size, - }; - if (!(0, safeObject_1.isPlainObject)(query)) { - throw kerror.get("services", "storage", "missing_argument", "body.query"); - } - const client = this._client; - let results = []; - let processed = 0; - let scrollId = null; - try { - results = await new bluebird_1.default((resolve, reject) => { - this._client.search(esRequest, async function getMoreUntilDone(error, { body: { hits, _scroll_id } }) { - if (error) { - reject(error); - return; - } - scrollId = _scroll_id; - const ret = callback(hits.hits); - results.push(await ret); - processed += hits.hits.length; - if (hits.total.value !== processed) { - client.scroll({ - scroll: esRequest.scroll, - scroll_id: _scroll_id, - }, getMoreUntilDone); - } - else { - resolve(results); - } - }); - }); - } - finally { - this.clearScroll(scrollId); - } - return results; - } - /** - * Creates a new index. - * - * This methods creates an hidden collection in the provided index to be - * able to list it. - * This methods resolves if the index name does not already exists either as - * private or public index. - * - * @param {String} index - Index name - * - * @returns {Promise} - */ - async createIndex(index) { - this._assertValidIndexAndCollection(index); - let body; - try { - body = (await this._client.cat.aliases({ format: "json" })).body; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias: name }) => name); - for (const alias of aliases) { - const indexName = this._extractIndex(alias); - if (index === indexName) { - const indexType = alias[INDEX_PREFIX_POSITION_IN_ALIAS] === PRIVATE_PREFIX - ? "private" - : "public"; - throw kerror.get("services", "storage", "index_already_exists", indexType, index); - } - } - await this._createHiddenCollection(index); - return null; - } - /** - * Creates an empty collection. - * Mappings and settings will be applied if supplied. 
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} config - mappings ({}), settings ({}) - * - * @returns {Promise} - */ - async createCollection(index, collection, { mappings = {}, settings = {}, } = {}) { - this._assertValidIndexAndCollection(index, collection); - if (collection === HIDDEN_COLLECTION) { - throw kerror.get("services", "storage", "collection_reserved", HIDDEN_COLLECTION); - } - const mutex = new mutex_1.Mutex(`hiddenCollection/create/${index}`); - try { - await mutex.lock(); - if (await this._hasHiddenCollection(index)) { - await this.deleteCollection(index, HIDDEN_COLLECTION); - } - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - finally { - await mutex.unlock(); - } - const esRequest = { - body: { - aliases: { - [this._getAlias(index, collection)]: {}, - }, - mappings: {}, - settings, - }, - index: await this._getAvailableIndice(index, collection), - wait_for_active_shards: await this._getWaitForActiveShards(), - }; - this._checkDynamicProperty(mappings); - const exists = await this.hasCollection(index, collection); - if (exists) { - return this.updateCollection(index, collection, { mappings, settings }); - } - this._checkMappings(mappings); - esRequest.body.mappings = { - _meta: mappings._meta || this._config.commonMapping._meta, - dynamic: mappings.dynamic || this._config.commonMapping.dynamic, - properties: lodash_1.default.merge(mappings.properties, this._config.commonMapping.properties), - }; - esRequest.body.settings.number_of_replicas = - esRequest.body.settings.number_of_replicas || - this._config.defaultSettings.number_of_replicas; - esRequest.body.settings.number_of_shards = - esRequest.body.settings.number_of_shards || - this._config.defaultSettings.number_of_shards; - try { - await this._client.indices.create(esRequest); - } - catch (error) { - if (lodash_1.default.get(error, "meta.body.error.type") === - "resource_already_exists_exception") { - // race condition: the indice has been created between the "exists" - // check above and this "create" attempt - return null; - } - throw this._esWrapper.formatESError(error); - } - return null; - } - /** - * Retrieves settings definition for index/type - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.<{ settings }>} - */ - async getSettings(index, collection) { - const indice = await this._getIndice(index, collection); - const esRequest = { - index: indice, - }; - (0, debug_1.default)("Get settings: %o", esRequest); - try { - const { body } = await this._client.indices.getSettings(esRequest); - return body[indice].settings.index; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Retrieves mapping definition for index/type - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} options - includeKuzzleMeta (false) - * - * @returns {Promise.<{ dynamic, _meta, properties }>} - */ - async getMapping(index, collection, { includeKuzzleMeta = false, } = {}) { - const indice = await this._getIndice(index, collection); - const esRequest = { - index: indice, - }; - (0, debug_1.default)("Get mapping: %o", esRequest); - try { - const { body } = await this._client.indices.getMapping(esRequest); - const properties = includeKuzzleMeta - ? 
body[indice].mappings.properties - : lodash_1.default.omit(body[indice].mappings.properties, "_kuzzle_info"); - return { - _meta: body[indice].mappings._meta, - dynamic: body[indice].mappings.dynamic, - properties, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Updates a collection mappings and settings - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} config - mappings ({}), settings ({}) - * - * @returns {Promise} - */ - async updateCollection(index, collection, { mappings = {}, settings = {}, } = {}) { - const esRequest = { - index: await this._getIndice(index, collection), - }; - // If either the putMappings or the putSettings operation fail, we need to - // rollback the whole operation. Since mappings can't be rollback, we try to - // update the settings first, then the mappings and we rollback the settings - // if putMappings fail. - let indexSettings; - try { - indexSettings = await this._getSettings(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - if (!lodash_1.default.isEmpty(settings)) { - await this.updateSettings(index, collection, settings); - } - try { - if (!lodash_1.default.isEmpty(mappings)) { - const previousMappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - await this.updateMapping(index, collection, mappings); - if (this._dynamicChanges(previousMappings, mappings)) { - await this.updateSearchIndex(index, collection); - } - } - } - catch (error) { - const allowedSettings = this.getAllowedIndexSettings(indexSettings); - // Rollback to previous settings - if (!lodash_1.default.isEmpty(settings)) { - await this.updateSettings(index, collection, allowedSettings); - } - throw error; - } - return null; - } - /** - * Given index settings we return a new version of index settings - * only with allowed settings that can be set (during update or create index). - * @param indexSettings the index settings - * @returns {{index: *}} a new index settings with only allowed settings. - */ - getAllowedIndexSettings(indexSettings) { - return { - index: lodash_1.default.omit(indexSettings.index, [ - "creation_date", - "provided_name", - "uuid", - "version", - ]), - }; - } - /** - * Sends an empty UpdateByQuery request to update the search index - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @returns {Promise.} {} - */ - async updateSearchIndex(index, collection) { - const esRequest = { - body: {}, - // @cluster: conflicts when two nodes start at the same time - conflicts: "proceed", - index: this._getAlias(index, collection), - refresh: true, - // This operation can take some time: this should be an ES - // background task. And it's preferable to a request timeout when - // processing large indexes. 
- wait_for_completion: false, - }; - (0, debug_1.default)("UpdateByQuery: %o", esRequest); - try { - await this._client.updateByQuery(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Update a collection mappings - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} mappings - Collection mappings in ES format - * - * @returns {Promise.<{ dynamic, _meta, properties }>} - */ - async updateMapping(index, collection, mappings = {}) { - const esRequest = { - body: {}, - index: this._getAlias(index, collection), - }; - this._checkDynamicProperty(mappings); - const collectionMappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - this._checkMappings(mappings); - esRequest.body = { - _meta: mappings._meta || collectionMappings._meta, - dynamic: mappings.dynamic || collectionMappings.dynamic, - properties: mappings.properties, - }; - (0, debug_1.default)("Update mapping: %o", esRequest); - try { - await this._client.indices.putMapping(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const fullProperties = lodash_1.default.merge(collectionMappings.properties, mappings.properties); - return { - _meta: esRequest.body._meta, - dynamic: esRequest.body.dynamic, - properties: fullProperties, - }; - } - /** - * Updates a collection settings (eg: analyzers) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} settings - Collection settings in ES format - * - * @returns {Promise} - */ - async updateSettings(index, collection, settings = {}) { - const esRequest = { - index: this._getAlias(index, collection), - }; - await this._client.indices.close(esRequest); - try { - await this._client.indices.putSettings({ ...esRequest, body: settings }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - finally { - await this._client.indices.open(esRequest); - } - return null; - } - /** - * Empties the content of a collection. Keep the existing mapping and settings. 
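The settings update above relies on Elasticsearch's close / putSettings / open cycle, because static settings such as analyzers can only be changed on a closed index. A minimal standalone sketch of that cycle, written against the ES 8 client request shape (the 7.x code above wraps the payload in `body`); the index name and analyzer are purely illustrative:

import { Client } from "@elastic/elasticsearch";

// Sketch only: apply static settings (e.g. a custom analyzer) by closing the
// index first, then always reopening it, even if the update fails.
async function updateStaticSettings(client: Client, index: string) {
  await client.indices.close({ index });

  try {
    await client.indices.putSettings({
      index,
      settings: {
        analysis: {
          analyzer: {
            html_stripper: {
              type: "custom",
              tokenizer: "standard",
              char_filter: ["html_strip"],
            },
          },
        },
      },
    });
  } finally {
    await client.indices.open({ index });
  }
}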
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise} - */ - async truncateCollection(index, collection) { - let mappings; - let settings; - const esRequest = { - index: await this._getIndice(index, collection), - }; - try { - mappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - settings = await this._getSettings(esRequest); - settings = { - ...settings, - ...this.getAllowedIndexSettings(settings), - }; - await this._client.indices.delete(esRequest); - await this._client.indices.create({ - ...esRequest, - body: { - aliases: { - [this._getAlias(index, collection)]: {}, - }, - mappings, - settings, - }, - wait_for_active_shards: await this._getWaitForActiveShards(), - }); - return null; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Runs several action and document - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents to import - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.<{ items, errors }> - */ - async import(index, collection, documents, { refresh, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection); - const dateNow = Date.now(); - const esRequest = { - body: documents, - refresh, - timeout, - }; - const kuzzleMeta = { - created: { - author: getKuid(userId), - createdAt: dateNow, - updatedAt: null, - updater: null, - }, - updated: { - updatedAt: dateNow, - updater: getKuid(userId), - }, - }; - assertWellFormedRefresh(esRequest); - this._scriptCheck(documents); - this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta); - let response; - try { - response = await this._client.bulk(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const body = response.body; - const result = { - errors: [], - items: [], - }; - let idx = 0; - /** - * @warning Critical code section - * - * bulk body can contain more than 10K elements - */ - for (let i = 0; i < body.items.length; i++) { - const row = body.items[i]; - const action = Object.keys(row)[0]; - const item = row[action]; - if (item.status >= 400) { - const error = { - _id: item._id, - status: item.status, - }; - // update action contain body in "doc" field - // the delete action is not followed by an action payload - if (action === "update") { - error._source = documents[idx + 1].doc; - error._source._kuzzle_info = undefined; - } - else if (action !== "delete") { - error._source = documents[idx + 1]; - error._source._kuzzle_info = undefined; - } - // ES response does not systematicaly include an error object - // (e.g. delete action with 404 status) - if (item.error) { - error.error = { - reason: item.error.reason, - type: item.error.type, - }; - } - result.errors.push({ [action]: error }); - } - else { - result.items.push({ - [action]: { - _id: item._id, - status: item.status, - }, - }); - } - // the delete action is not followed by an action payload - idx = action === "delete" ? 
idx + 1 : idx + 2; - } - /* end critical code section */ - return result; - } - /** - * Retrieves the complete list of existing collections in the current index - * - * @param {String} index - Index name - * @param {Object.Boolean} includeHidden - Optional: include HIDDEN_COLLECTION in results - * - * @returns {Promise.} Collection names - */ - async listCollections(index, { includeHidden = false } = {}) { - let body; - try { - ({ body } = await this._client.cat.aliases({ format: "json" })); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias }) => alias); - const schema = this._extractSchema(aliases, { includeHidden }); - return schema[index] || []; - } - /** - * Retrieves the complete list of indexes - * - * @returns {Promise.} Index names - */ - async listIndexes() { - let body; - try { - ({ body } = await this._client.cat.aliases({ format: "json" })); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias }) => alias); - const schema = this._extractSchema(aliases); - return Object.keys(schema); - } - /** - * Returns an object containing the list of indexes and collections - * - * @returns {Object.} Object - */ - async getSchema() { - let body; - try { - ({ body } = await this._client.cat.aliases({ format: "json" })); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias }) => alias); - const schema = this._extractSchema(aliases, { includeHidden: true }); - for (const [index, collections] of Object.entries(schema)) { - schema[index] = collections.filter((c) => c !== HIDDEN_COLLECTION); - } - return schema; - } - /** - * Retrieves the complete list of aliases - * - * @returns {Promise.} [ { alias, index, collection, indice } ] - */ - async listAliases() { - let body; - try { - ({ body } = await this._client.cat.aliases({ format: "json" })); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = []; - for (const { alias, index: indice } of body) { - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix) { - aliases.push({ - alias, - collection: this._extractCollection(alias), - index: this._extractIndex(alias), - indice, - }); - } - } - return aliases; - } - /** - * Deletes a collection - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise} - */ - async deleteCollection(index, collection) { - const indice = await this._getIndice(index, collection); - const esRequest = { - index: indice, - }; - try { - await this._client.indices.delete(esRequest); - const alias = this._getAlias(index, collection); - if (await this._checkIfAliasExists(alias)) { - await this._client.indices.deleteAlias({ - index: indice, - name: alias, - }); - } - await this._createHiddenCollection(index); - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - return null; - } - /** - * Deletes multiple indexes - * - * @param {String[]} indexes - Index names - * - * @returns {Promise.} - */ - async deleteIndexes(indexes = []) { - if (indexes.length === 0) { - return bluebird_1.default.resolve([]); - } - const deleted = new Set(); - try { - const { body } = await this._client.cat.aliases({ format: "json" }); - const esRequest = body.reduce((request, { alias, index: indice }) => { - const index = this._extractIndex(alias); - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || - !indexes.includes(index)) { - return request; - } - 
deleted.add(index); - request.index.push(indice); - return request; - }, { index: [] }); - if (esRequest.index.length === 0) { - return []; - } - (0, debug_1.default)("Delete indexes: %o", esRequest); - await this._client.indices.delete(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - return Array.from(deleted); - } - /** - * Deletes an index - * - * @param {String} index - Index name - * - * @returns {Promise} - */ - async deleteIndex(index) { - await this.deleteIndexes([index]); - return null; - } - /** - * Forces a refresh on the collection. - * - * /!\ Can lead to some performance issues. - * cf https://www.elastic.co/guide/en/elasticsearch/guide/current/near-real-time.html for more details - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.} { _shards } - */ - async refreshCollection(index, collection) { - const esRequest = { - index: this._getAlias(index, collection), - }; - let _shards; - try { - ({ - body: { _shards }, - } = await this._client.indices.refresh(esRequest)); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - return { _shards }; - } - /** - * Returns true if the document exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document ID - * - * @returns {Promise.} - */ - async exists(index, collection, id) { - const esRequest = { - id, - index: this._getAlias(index, collection), - }; - try { - const { body: exists } = await this._client.exists(esRequest); - return exists; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Returns the list of documents existing with the ids given in the body param - * NB: Due to internal Kuzzle mechanism, can only be called on a single - * index/collection, using the body { ids: [.. } syntax. 
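The refresh and existence checks above translate to two very small Elasticsearch calls. An illustrative sketch with the ES 8 client (alias and document id are hypothetical), also showing that the 8.x client returns the response body directly instead of the `{ body }` envelope destructured in the code above:

import { Client } from "@elastic/elasticsearch";

// Sketch only: force a refresh so recent writes become searchable, then test
// whether a given document exists behind the collection alias.
async function refreshAndCheck(client: Client, alias: string, id: string) {
  const { _shards } = await client.indices.refresh({ index: alias });

  // client.exists() resolves to a plain boolean with the 8.x client.
  const exists = await client.exists({ index: alias, id });

  return { _shards, exists };
}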
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Document IDs - * - * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} - */ - async mExists(index, collection, ids) { - if (ids.length === 0) { - return { errors: [], item: [] }; - } - const esRequest = { - _source: "false", - body: { - docs: ids.map((_id) => ({ _id })), - }, - index: this._getAlias(index, collection), - }; - (0, debug_1.default)("mExists: %o", esRequest); - let body; - try { - ({ body } = await this._client.mget(esRequest)); // NOSONAR - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - const errors = []; - const items = []; - for (let i = 0; i < body.docs.length; i++) { - const doc = body.docs[i]; - if (doc.found) { - items.push(doc._id); - } - else { - errors.push(doc._id); - } - } - return { errors, items }; - } - /** - * Returns true if the index exists - * - * @param {String} index - Index name - * - * @returns {Promise.} - */ - async hasIndex(index) { - const indexes = await this.listIndexes(); - return indexes.some((idx) => idx === index); - } - /** - * Returns true if the collection exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.} - */ - async hasCollection(index, collection) { - const collections = await this.listCollections(index); - return collections.some((col) => col === collection); - } - /** - * Returns true if the index has the hidden collection - * - * @param {String} index - Index name - * - * @returns {Promise.} - */ - async _hasHiddenCollection(index) { - const collections = await this.listCollections(index, { - includeHidden: true, - }); - return collections.some((col) => col === HIDDEN_COLLECTION); - } - /** - * Creates multiple documents at once. - * If a content has no id, one is automatically generated and assigned to it. - * If a content has a specified identifier, it is rejected if it already exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mCreate(index, collection, documents, { refresh, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection), kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); - // prepare the mget request, but only for document having a specified id - const { body } = documentsToGet.length > 0 - ? 
await this._client.mget({ - body: { docs: documentsToGet }, - index: alias, - }) - : { body: { docs: [] } }; - const existingDocuments = body.docs; - const esRequest = { - body: [], - index: alias, - refresh, - timeout, - }; - const toImport = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0, idx = 0; i < extractedDocuments.length; i++) { - const document = extractedDocuments[i]; - // Documents are retrieved in the same order than we got them from user - if (typeof document._id === "string" && existingDocuments[idx]) { - if (existingDocuments[idx].found) { - document._source._kuzzle_info = undefined; - rejected.push({ - document: { - _id: document._id, - body: document._source, - }, - reason: "document already exists", - status: 400, - }); - } - else { - esRequest.body.push({ - index: { - _id: document._id, - _index: alias, - }, - }); - esRequest.body.push(document._source); - toImport.push(document); - } - idx++; - } - else { - esRequest.body.push({ index: { _index: alias } }); - esRequest.body.push(document._source); - toImport.push(document); - } - } - /* end critical code section */ - return this._mExecute(esRequest, toImport, rejected); - } - /** - * Creates or replaces multiple documents at once. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null), injectKuzzleMeta (false), limits (true) - * - * @returns {Promise.<{ items, errors }> - */ - async mCreateOrReplace(index, collection, documents, { refresh, timeout, userId = null, injectKuzzleMeta = true, limits = true, source = true, } = {}) { - let kuzzleMeta = {}; - if (injectKuzzleMeta) { - kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }; - } - const alias = this._getAlias(index, collection); - const esRequest = { - body: [], - index: alias, - refresh, - timeout, - }; - const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); - esRequest.body = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.body.push({ - index: { - _id: extractedDocuments[i]._id, - _index: alias, - }, - }); - esRequest.body.push(extractedDocuments[i]._source); - } - /* end critical code section */ - return this._mExecute(esRequest, extractedDocuments, rejected, { - limits, - source, - }); - } - /** - * Updates multiple documents with one request - * Replacements are rejected if targeted documents do not exist - * (like with the normal "update" method) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), retryOnConflict (0), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mUpdate(index, collection, documents, { refresh = undefined, retryOnConflict = 0, timeout = undefined, userId = null, } = {}) { - const alias = this._getAlias(index, collection), toImport = [], esRequest = { - body: [], - index: alias, - refresh, - timeout, - }, kuzzleMeta = { - _kuzzle_info: { - updatedAt: Date.now(), - updater: getKuid(userId), - }, - }, { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); - /** - * @warning 
Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - const extractedDocument = extractedDocuments[i]; - if (typeof extractedDocument._id === "string") { - esRequest.body.push({ - update: { - _id: extractedDocument._id, - _index: alias, - retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }, - }); - // _source: true => makes ES return the updated document source in the - // response. Required by the real-time notifier component - esRequest.body.push({ - _source: true, - doc: extractedDocument._source, - }); - toImport.push(extractedDocument); - } - else { - extractedDocument._source._kuzzle_info = undefined; - rejected.push({ - document: { - _id: extractedDocument._id, - body: extractedDocument._source, - }, - reason: "document _id must be a string", - status: 400, - }); - } - } - /* end critical code section */ - const response = await this._mExecute(esRequest, toImport, rejected); - // with _source: true, ES returns the updated document in - // response.result.get._source - // => we replace response.result._source with it so that the notifier - // module can seamlessly process all kind of m* response* - response.items = response.items.map((item) => ({ - _id: item._id, - _source: item.get._source, - _version: item._version, - status: item.status, - })); - return response; - } - /** - * Creates or replaces multiple documents at once. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - refresh (undefined), retryOnConflict (0), timeout (undefined), userId (null) - * - * @returns {Promise.<{ items, errors }> - */ - async mUpsert(index, collection, documents, { refresh, retryOnConflict = 0, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection); - const esRequest = { - body: [], - refresh, - timeout, - }; - const user = getKuid(userId); - const now = Date.now(); - const kuzzleMeta = { - doc: { - _kuzzle_info: { - updatedAt: now, - updater: user, - }, - }, - upsert: { - _kuzzle_info: { - author: user, - createdAt: now, - }, - }, - }; - const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta, { - prepareMUpsert: true, - requireId: true, - }); - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.body.push({ - update: { - _id: extractedDocuments[i]._id, - _index: alias, - _source: true, - retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }, - }, { - doc: extractedDocuments[i]._source.changes, - upsert: extractedDocuments[i]._source.default, - }); - // _source: true - // Makes ES return the updated document source in the response. 
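The two bulk update flavors above (mUpdate and mUpsert) rely on the same Elasticsearch pattern: an action metadata line followed by a payload line, with `_source: true` so each bulk item response carries the updated document. A reduced sketch of a single update-with-upsert pair, using the ES 8 client where the bulk payload is named `operations`; alias, ids and field values are invented:

import { Client } from "@elastic/elasticsearch";

// Sketch only: one update-with-upsert pair in a bulk request. The payload line
// carries "doc" (applied when the document exists), "upsert" (used when it does
// not) and "_source": true (ask ES to return the resulting document source).
async function bulkUpsertOne(client: Client, alias: string) {
  const response = await client.bulk({
    operations: [
      { update: { _id: "doc-1", _index: alias, retry_on_conflict: 3 } },
      {
        doc: { status: "updated", updatedAt: Date.now() },
        upsert: { status: "created", createdAt: Date.now() },
        _source: true,
      },
    ],
    refresh: "wait_for",
  });

  // Each bulk item is keyed by its action name ("update" here).
  return response.items.map((item) => item.update?.get?._source);
}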
- // Required by the real-time notifier component - } - /* end critical code section */ - const response = await this._mExecute(esRequest, extractedDocuments, rejected); - // with _source: true, ES returns the updated document in - // response.result.get._source - // => we replace response.result._source with it so that the notifier - // module can seamlessly process all kind of m* response* - response.items = response.items.map((item) => ({ - _id: item._id, - _source: item.get._source, - _version: item._version, - created: item.result === "created", // Needed by the notifier - status: item.status, - })); - return response; - } - /** - * Replaces multiple documents at once. - * Replacements are rejected if targeted documents do not exist - * (like with the normal "replace" method) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mReplace(index, collection, documents, { refresh, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection), kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { - prepareMGet: true, - requireId: true, - }); - if (documentsToGet.length < 1) { - return { errors: rejected, items: [] }; - } - const { body } = await this._client.mget({ - body: { docs: documentsToGet }, - index: alias, - }); - const existingDocuments = body.docs; - const esRequest = { - body: [], - refresh, - timeout, - }; - const toImport = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - const document = extractedDocuments[i]; - // Documents are retrieved in the same order than we got them from user - if (existingDocuments[i]?.found) { - esRequest.body.push({ - index: { - _id: document._id, - _index: alias, - }, - }); - esRequest.body.push(document._source); - toImport.push(document); - } - else { - document._source._kuzzle_info = undefined; - rejected.push({ - document: { - _id: document._id, - body: document._source, - }, - reason: "document not found", - status: 404, - }); - } - } - /* end critical code section */ - return this._mExecute(esRequest, toImport, rejected); - } - /** - * Deletes multiple documents with one request - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Documents IDs - * @param {Object} options - timeout (undefined), refresh (undefined) - * - * @returns {Promise.<{ documents, errors }> - */ - async mDelete(index, collection, ids, { refresh, } = {}) { - const query = { ids: { values: [] } }; - const validIds = []; - const partialErrors = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < ids.length; i++) { - const _id = ids[i]; - if (typeof _id === "string") { - validIds.push(_id); - } - else { - partialErrors.push({ - _id, - reason: "document _id must be a string", - status: 400, - }); - } - } - /* end critical code section */ - await this.refreshCollection(index, collection); - const { items } = await this.mGet(index, collection, validIds); - let idx = 0; - /** - * @warning Critical code section - * - * 
request can contain more than 10K elements - */ - for (let i = 0; i < validIds.length; i++) { - const validId = validIds[i]; - const item = items[idx]; - if (item && item._id === validId) { - query.ids.values.push(validId); - idx++; - } - else { - partialErrors.push({ - _id: validId, - reason: "document not found", - status: 404, - }); - } - } - /* end critical code section */ - // @todo duplicated query to get documents body, mGet here and search in - // deleteByQuery - const { documents } = await this.deleteByQuery(index, collection, query, { - refresh, - }); - return { documents, errors: partialErrors }; - } - /** - * Executes an ES request prepared by mcreate, mupdate, mreplace, mdelete or mwriteDocuments - * Returns a standardized ES response object, containing the list of - * successfully performed operations, and the rejected ones - * - * @param {Object} esRequest - Elasticsearch request - * @param {Object[]} documents - Document sources (format: {_id, _source}) - * @param {Object[]} partialErrors - pre-rejected documents - * @param {Object} options - limits (true) - * - * @returns {Promise.} results - */ - async _mExecute(esRequest, documents, partialErrors = [], { limits = true, source = true } = {}) { - assertWellFormedRefresh(esRequest); - if (this._hasExceededLimit(limits, documents)) { - return kerror.reject("services", "storage", "write_limit_exceeded"); - } - let response = { body: { items: [] } }; - if (documents.length > 0) { - try { - response = await this._client.bulk(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - const body = response.body; - const successes = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < body.items.length; i++) { - const item = body.items[i]; - const result = item[Object.keys(item)[0]]; - if (result.status >= 400) { - if (result.status === 404) { - partialErrors.push({ - document: { - _id: documents[i]._id, - body: documents[i]._source, - }, - reason: "document not found", - status: result.status, - }); - } - else { - partialErrors.push({ - document: documents[i], - reason: result.error.reason, - status: result.status, - }); - } - } - else { - successes.push({ - _id: result._id, - _source: source ? 
documents[i]._source : undefined, - _version: result._version, - created: result.result === "created", - get: result.get, - result: result.result, - status: result.status, // used by mUpdate to get the full document body - }); - } - } - /* end critical code section */ - return { - errors: partialErrors, // @todo rename items to documents - items: successes, - }; - } - /** - * Extracts, injects metadata and validates documents contained - * in a Request - * - * Used by mCreate, mUpdate, mUpsert, mReplace and mCreateOrReplace - * - * @param {Object[]} documents - Documents - * @param {Object} metadata - Kuzzle metadata - * @param {Object} options - prepareMGet (false), requireId (false) - * - * @returns {Object} { rejected, extractedDocuments, documentsToGet } - */ - _extractMDocuments(documents, metadata, { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}) { - const rejected = []; - const extractedDocuments = []; - const documentsToGet = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < documents.length; i++) { - const document = documents[i]; - if (!(0, safeObject_1.isPlainObject)(document.body) && !prepareMUpsert) { - rejected.push({ - document, - reason: "document body must be an object", - status: 400, - }); - } - else if (!(0, safeObject_1.isPlainObject)(document.changes) && prepareMUpsert) { - rejected.push({ - document, - reason: "document changes must be an object", - status: 400, - }); - } - else if (prepareMUpsert && - document.default && - !(0, safeObject_1.isPlainObject)(document.default)) { - rejected.push({ - document, - reason: "document default must be an object", - status: 400, - }); - } - else if (requireId && typeof document._id !== "string") { - rejected.push({ - document, - reason: "document _id must be a string", - status: 400, - }); - } - else { - this._processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet); - } - } - /* end critical code section */ - return { documentsToGet, extractedDocuments, rejected }; - } - _hasExceededLimit(limits, documents) { - return (limits && - documents.length > global.kuzzle.config.limits.documentsWriteCount); - } - _processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet) { - let extractedDocument; - if (prepareMUpsert) { - extractedDocument = { - _source: { - // Do not use destructuring, it's 10x slower - changes: Object.assign({}, metadata.doc, document.changes), - default: Object.assign({}, metadata.upsert, document.changes, document.default), - }, - }; - } - else { - extractedDocument = { - // Do not use destructuring, it's 10x slower - _source: Object.assign({}, metadata, document.body), - }; - } - if (document._id) { - extractedDocument._id = document._id; - } - extractedDocuments.push(extractedDocument); - if (prepareMGet && typeof document._id === "string") { - documentsToGet.push({ - _id: document._id, - _source: false, - }); - } - } - /** - * Throws an error if the provided mapping is invalid - * - * @param {Object} mapping - * @throws - */ - _checkMappings(mapping, path = [], check = true) { - const properties = Object.keys(mapping); - const mappingProperties = path.length === 0 - ? 
ROOT_MAPPING_PROPERTIES - : [...ROOT_MAPPING_PROPERTIES, ...CHILD_MAPPING_PROPERTIES]; - for (const property of properties) { - if (check && !mappingProperties.includes(property)) { - const currentPath = [...path, property].join("."); - throw kerror.get("services", "storage", "invalid_mapping", currentPath, (0, didYouMean_1.default)(property, mappingProperties)); - } - if (property === "properties") { - // type definition level, we don't check - this._checkMappings(mapping[property], [...path, "properties"], false); - } - else if (mapping[property]?.properties) { - // root properties level, check for "properties", "dynamic" and "_meta" - this._checkMappings(mapping[property], [...path, property], true); - } - } - } - /** - * Given index + collection, returns the associated alias name. - * Prefer this function to `_getIndice` and `_getAvailableIndice` whenever it is possible. - * - * @param {String} index - * @param {String} collection - * - * @returns {String} Alias name (eg: '@&nepali.liia') - */ - _getAlias(index, collection) { - return `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; - } - /** - * Given an alias name, returns the associated index name. - */ - async _checkIfAliasExists(aliasName) { - const { body } = await this._client.indices.existsAlias({ - name: aliasName, - }); - return body; - } - /** - * Given index + collection, returns the associated indice name. - * Use this function if ES does not accept aliases in the request. Otherwise use `_getAlias`. - * - * @param {String} index - * @param {String} collection - * - * @returns {String} Indice name (eg: '&nepali.liia') - * @throws If there is not exactly one indice associated - */ - async _getIndice(index, collection) { - const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; - const { body } = await this._client.cat.aliases({ - format: "json", - name: alias, - }); - if (body.length < 1) { - throw kerror.get("services", "storage", "unknown_index_collection"); - } - else if (body.length > 1) { - throw kerror.get("services", "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, '"indices"'); - } - return body[0].index; - } - /** - * Given an ES Request returns the settings of the corresponding indice. - * - * @param esRequest the ES Request with wanted settings. - * @return {Promise<*>} the settings of the indice. - * @private - */ - async _getSettings(esRequest) { - const response = await this._client.indices.getSettings(esRequest); - const index = esRequest.index; - return response.body[index].settings; - } - /** - * Given index + collection, returns an available indice name. - * Use this function when creating the associated indice. Otherwise use `_getAlias`. 
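To make the naming convention described above concrete, here is a dependency-free sketch with the relevant constants inlined; the example names come from the JSDoc above ("&" is the public-scope prefix, "%" the private one):

const ALIAS_PREFIX = "@";
const PUBLIC_PREFIX = "&";
const NAME_SEPARATOR = ".";

// Sketch only: build the alias associated with a Kuzzle index/collection pair.
function aliasFor(index: string, collection: string, scopePrefix = PUBLIC_PREFIX): string {
  return `${ALIAS_PREFIX}${scopePrefix}${index}${NAME_SEPARATOR}${collection}`;
}

// aliasFor("nepali", "liia") === "@&nepali.liia"
// The physical indice behind that alias drops the "@" prefix ("&nepali.liia"),
// and may carry an extra suffix (e.g. "&nepali.liia2") when the base name is
// already taken.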
- * - * @param {String} index - * @param {String} collection - * - * @returns {String} Available indice name (eg: '&nepali.liia2') - */ - async _getAvailableIndice(index, collection) { - let indice = this._getAlias(index, collection).substring(INDEX_PREFIX_POSITION_IN_ALIAS); - if (!(await this._client.indices.exists({ index: indice })).body) { - return indice; - } - let notAvailable; - let suffix; - do { - suffix = `.${(0, name_generator_1.randomNumber)(100000)}`; - const overflow = Buffer.from(indice + suffix).length - 255; - if (overflow > 0) { - const indiceBuffer = Buffer.from(indice); - indice = indiceBuffer - .subarray(0, indiceBuffer.length - overflow) - .toString(); - } - const response = await this._client.indices.exists({ - index: indice + suffix, - }); - notAvailable = response.body; - } while (notAvailable); - return indice + suffix; - } - /** - * Given an indice, returns the associated alias name. - * - * @param {String} indice - * - * @returns {String} Alias name (eg: '@&nepali.liia') - * @throws If there is not exactly one alias associated that is prefixed with @ - */ - async _getAliasFromIndice(indice) { - const { body } = await this._client.indices.getAlias({ index: indice }); - const aliases = Object.keys(body[indice].aliases).filter((alias) => alias.startsWith(ALIAS_PREFIX)); - if (aliases.length < 1) { - throw kerror.get("services", "storage", "unknown_index_collection"); - } - return aliases; - } - /** - * Check for each indice whether it has an alias or not. - * When the latter is missing, create one based on the indice name. - * - * This check avoids a breaking change for those who were using Kuzzle before - * alias attribution for each indice turned into a standard (appear in 2.14.0). - */ - async generateMissingAliases() { - try { - const { body } = await this._client.cat.indices({ format: "json" }); - const indices = body.map(({ index: indice }) => indice); - const aliases = await this.listAliases(); - const indicesWithoutAlias = indices.filter((indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice)); - const esRequest = { body: { actions: [] } }; - for (const indice of indicesWithoutAlias) { - esRequest.body.actions.push({ - add: { alias: `${ALIAS_PREFIX}${indice}`, index: indice }, - }); - } - if (esRequest.body.actions.length > 0) { - await this._client.indices.updateAliases(esRequest); - } - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Throws if index or collection includes forbidden characters - * - * @param {String} index - * @param {String} collection - */ - _assertValidIndexAndCollection(index, collection = null) { - if (!this.isIndexNameValid(index)) { - throw kerror.get("services", "storage", "invalid_index_name", index); - } - if (collection !== null && !this.isCollectionNameValid(collection)) { - throw kerror.get("services", "storage", "invalid_collection_name", collection); - } - } - /** - * Given an alias, extract the associated index. - * - * @param {String} alias - * - * @returns {String} Index name - */ - _extractIndex(alias) { - return alias.substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1); - } - /** - * Given an alias, extract the associated collection. 
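The suffixing logic above also has to respect Elasticsearch's 255-byte limit on indice names: when the generated name overflows, the base indice is truncated on byte boundaries before the suffix is appended. A standalone sketch of that calculation (names are hypothetical):

// Sketch only: shorten the base indice so that "indice + suffix" stays within
// Elasticsearch's byte limit. Lengths are counted in bytes, not characters.
function fitIndiceName(indice: string, suffix: string, maxBytes = 255): string {
  const overflow = Buffer.from(indice + suffix).length - maxBytes;

  if (overflow <= 0) {
    return indice + suffix;
  }

  const indiceBuffer = Buffer.from(indice);

  return indiceBuffer.subarray(0, indiceBuffer.length - overflow).toString() + suffix;
}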
- * - * @param {String} alias - * - * @returns {String} Collection name - */ - _extractCollection(alias) { - const separatorPos = alias.indexOf(NAME_SEPARATOR); - return alias.substr(separatorPos + 1, alias.length); - } - /** - * Given aliases, extract indexes and collections. - * - * @param {Array.} aliases - * @param {Object.Boolean} includeHidden Only refers to `HIDDEN_COLLECTION` occurences. An empty index will still be listed. Default to `false`. - * - * @returns {Object.} Indexes as key and an array of their collections as value - */ - _extractSchema(aliases, { includeHidden = false } = {}) { - const schema = {}; - for (const alias of aliases) { - const [indexName, collectionName] = alias - .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) - .split(NAME_SEPARATOR); - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix && - (collectionName !== HIDDEN_COLLECTION || includeHidden)) { - if (!schema[indexName]) { - schema[indexName] = []; - } - if (!schema[indexName].includes(collectionName)) { - schema[indexName].push(collectionName); - } - } - } - return schema; - } - /** - * Creates the hidden collection on the provided index if it does not already - * exists - * - * @param {String} index Index name - */ - async _createHiddenCollection(index) { - const mutex = new mutex_1.Mutex(`hiddenCollection/${index}`); - try { - await mutex.lock(); - if (await this._hasHiddenCollection(index)) { - return; - } - const esRequest = { - body: { - aliases: { - [this._getAlias(index, HIDDEN_COLLECTION)]: {}, - }, - settings: { - number_of_replicas: this._config.defaultSettings.number_of_replicas, - number_of_shards: this._config.defaultSettings.number_of_shards, - }, - }, - index: await this._getAvailableIndice(index, HIDDEN_COLLECTION), - wait_for_active_shards: await this._getWaitForActiveShards(), - }; - await this._client.indices.create(esRequest); - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - finally { - await mutex.unlock(); - } - } - /** - * We need to always wait for a minimal number of shards to be available - * before answering to the client. This is to avoid Elasticsearch node - * to return a 404 Not Found error when the client tries to index a - * document in the index. - * To find the best value for this setting, we need to take into account - * the number of nodes in the cluster and the number of shards per index. 
- */ - async _getWaitForActiveShards() { - const { body } = await this._client.cat.nodes({ format: "json" }); - const numberOfNodes = body.length; - if (numberOfNodes > 1) { - return "all"; - } - return "1"; - } - /** - * Scroll indice in elasticsearch and return all document that match the filter - * /!\ throws a write_limit_exceed error: this method is intended to be used - * by deleteByQuery and updateByQuery - * - * @param {Object} esRequest - Search request body - * - * @returns {Promise.} resolve to an array of documents - */ - async _getAllDocumentsFromQuery(esRequest) { - let { body: { hits, _scroll_id }, } = await this._client.search(esRequest); - if (hits.total.value > global.kuzzle.config.limits.documentsWriteCount) { - throw kerror.get("services", "storage", "write_limit_exceeded"); - } - let documents = hits.hits.map((h) => ({ - _id: h._id, - _source: h._source, - })); - while (hits.total.value !== documents.length) { - ({ - body: { hits, _scroll_id }, - } = await this._client.scroll({ - scroll: esRequest.scroll, - scroll_id: _scroll_id, - })); - documents = documents.concat(hits.hits.map((h) => ({ - _id: h._id, - _source: h._source, - }))); - } - await this.clearScroll(_scroll_id); - return documents; - } - /** - * Clean and normalize the searchBody - * Ensure only allowed parameters are passed to ES - * - * @param {Object} searchBody - ES search body (with query, aggregations, sort, etc) - */ - _sanitizeSearchBody(searchBody) { - // Only allow a whitelist of top level properties - for (const key of Object.keys(searchBody)) { - if (searchBody[key] !== undefined && !this.searchBodyKeys.includes(key)) { - throw kerror.get("services", "storage", "invalid_search_query", key); - } - } - // Ensure that the body does not include a script - this._scriptCheck(searchBody); - // Avoid empty queries that causes ES to respond with an error. - // Empty queries are turned into match_all queries - if (lodash_1.default.isEmpty(searchBody.query)) { - searchBody.query = { match_all: {} }; - } - return searchBody; - } - /** - * Throw if a script is used in the query. - * - * Only Stored Scripts are accepted - * - * @param {Object} object - */ - _scriptCheck(object) { - for (const [key, value] of Object.entries(object)) { - if (this.scriptKeys.includes(key)) { - for (const scriptArg of Object.keys(value)) { - if (!this.scriptAllowedArgs.includes(scriptArg)) { - throw kerror.get("services", "storage", "invalid_query_keyword", `${key}.${scriptArg}`); - } - } - } - // Every object must be checked here, even the ones nested into an array - else if (typeof value === "object" && value !== null) { - this._scriptCheck(value); - } - } - } - /** - * Checks if a collection name is valid - * @param {string} name - * @returns {Boolean} - */ - isCollectionNameValid(name) { - return _isObjectNameValid(name); - } - /** - * Checks if a collection name is valid - * @param {string} name - * @returns {Boolean} - */ - isIndexNameValid(name) { - return _isObjectNameValid(name); - } - /** - * Clears an allocated scroll - * @param {[type]} id [description] - * @returns {[type]} [description] - */ - async clearScroll(id) { - if (id) { - (0, debug_1.default)("clearing scroll: %s", id); - await this._client.clearScroll({ scroll_id: id }); - } - } - /** - * Loads a configuration value from services.storageEngine and assert a valid - * ms format. 
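The `_getAllDocumentsFromQuery` helper above follows the classic search-then-scroll loop. An illustrative, self-contained version written in the ES 8 client style (index name, page size and scroll TTL are arbitrary):

import { Client } from "@elastic/elasticsearch";

// Sketch only: page through every hit of a query with the scroll API, then
// release the scroll context once the last page has been fetched.
async function fetchAllIds(client: Client, index: string): Promise<string[]> {
  const ids: string[] = [];

  let response = await client.search({
    index,
    query: { match_all: {} },
    scroll: "5s",
    size: 100,
  });
  let scrollId = response._scroll_id;

  while (response.hits.hits.length > 0) {
    ids.push(...response.hits.hits.map((hit) => String(hit._id)));

    response = await client.scroll({ scroll: "5s", scroll_id: scrollId as string });
    scrollId = response._scroll_id;
  }

  if (scrollId) {
    await client.clearScroll({ scroll_id: scrollId });
  }

  return ids;
}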
- * - * @param {String} key - relative path to the key in configuration - * - * @returns {Number} milliseconds - */ - _loadMsConfig(key) { - const configValue = lodash_1.default.get(this._config, key); - (0, assert_1.default)(typeof configValue === "string", `services.storageEngine.${key} must be a string.`); - const parsedValue = (0, ms_1.default)(configValue); - (0, assert_1.default)(typeof parsedValue === "number", `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`); - return parsedValue; - } - /** - * Returns true if one of the mappings dynamic property changes value from - * false to true - */ - _dynamicChanges(previousMappings, newMappings) { - const previousValues = findDynamic(previousMappings); - for (const [path, previousValue] of Object.entries(previousValues)) { - if (previousValue.toString() !== "false") { - continue; - } - const newValue = lodash_1.default.get(newMappings, path); - if (newValue && newValue.toString() !== "false") { - return true; - } - } - return false; - } - async waitForElasticsearch() { - if (esState !== esStateEnum.NONE) { - while (esState !== esStateEnum.OK) { - await bluebird_1.default.delay(1000); - } - return; - } - esState = esStateEnum.AWAITING; - global.kuzzle.log.info("[ℹ] Trying to connect to Elasticsearch..."); - while (esState !== esStateEnum.OK) { - try { - // Wait for at least 1 shard to be initialized - const health = await this._client.cluster.health({ - wait_for_no_initializing_shards: true, - }); - if (health.body.number_of_pending_tasks === 0) { - global.kuzzle.log.info("[✔] Elasticsearch is ready"); - esState = esStateEnum.OK; - } - else { - global.kuzzle.log.info(`[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining`); - await bluebird_1.default.delay(1000); - } - } - catch (e) { - await bluebird_1.default.delay(1000); - } - } - } - /** - * Checks if the dynamic properties are correct - */ - _checkDynamicProperty(mappings) { - const dynamicProperties = findDynamic(mappings); - for (const [path, value] of Object.entries(dynamicProperties)) { - // Prevent common mistake - if (typeof value === "boolean") { - lodash_1.default.set(mappings, path, value.toString()); - } - else if (typeof value !== "string") { - throw kerror.get("services", "storage", "invalid_mapping", path, "Dynamic property value should be a string."); - } - if (!DYNAMIC_PROPERTY_VALUES.includes(value.toString())) { - throw kerror.get("services", "storage", "invalid_mapping", path, `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join('", "')}"`); - } - } - } - _setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta) { - /** - * @warning Critical code section - * - * bulk body can contain more than 10K elements - */ - let lastAction = ""; - const actionNames = ["index", "create", "update", "delete"]; - for (let i = 0; i < esRequest.body.length; i++) { - const item = esRequest.body[i]; - const action = Object.keys(item)[0]; - if (actionNames.indexOf(action) !== -1) { - lastAction = action; - item[action]._index = alias; - if (item[action]?._type) { - item[action]._type = undefined; - } - } - else if (lastAction === "index" || lastAction === "create") { - item._kuzzle_info = kuzzleMeta.created; - } - else if (lastAction === "update") { - this._setLastActionToKuzzleMetaUpdate(item, kuzzleMeta); - } - } - /* end critical code section */ - } - _setLastActionToKuzzleMetaUpdate(item, kuzzleMeta) { - for (const prop of ["doc", "upsert"]) { - if ((0, safeObject_1.isPlainObject)(item[prop])) { - item[prop]._kuzzle_info = kuzzleMeta.updated; - } - } - } -} -exports.ES7 = ES7; -/** - * Finds paths and values of mappings dynamic properties - * - * @example - * - * findDynamic(mappings); - * { - * "properties.metadata.dynamic": "true", - * "properties.user.properties.address.dynamic": "strict" - * } - */ -function findDynamic(mappings, path = [], results = {}) { - if (mappings.dynamic !== undefined) { - results[path.concat("dynamic").join(".")] = mappings.dynamic; - } - for (const [key, value] of Object.entries(mappings)) { - if ((0, safeObject_1.isPlainObject)(value)) { - findDynamic(value, path.concat(key), results); - } - } - return results; -} -/** - * Forbids the use of the _routing ES option - * - * @param {Object} esRequest - * @throws - */ -function assertNoRouting(esRequest) { - if (esRequest.body._routing) { - throw kerror.get("services", "storage", "no_routing"); - } -} -/** - * Checks if the optional "refresh" argument is well-formed - * - * @param {Object} esRequest - * @throws - */ -function assertWellFormedRefresh(esRequest) { - if (!["wait_for", "false", false, undefined].includes(esRequest.refresh)) { - throw kerror.get("services", "storage", "invalid_argument", "refresh", '"wait_for", false'); - } -} -function getKuid(userId) { - if (!userId) { - return null; - } - return String(userId); -} -/** - * Checks if an index or collection name is valid - * - * @see https://www.elastic.co/guide/en/elasticsearch/reference/7.4/indices-create-index.html - * - * Beware of the length check: ES allows indice names up to 255 bytes, but since - * in Kuzzle we emulate collections as indices, we have to make sure - * that the privacy prefix, the index name, the separator and the collection - * name ALL fit within the 255-bytes limit of Elasticsearch. 
The simplest way - * is to limit index and collection names to 126 bytes and document that - * limitation (prefix(1) + index(1..126) + sep(1) + collection(1..126) = 4..254) - * - * @param {string} name - * @returns {Boolean} - */ -function _isObjectNameValid(name) { - if (typeof name !== "string" || name.length === 0) { - return false; - } - if (name.toLowerCase() !== name) { - return false; - } - if (Buffer.from(name).length > 126) { - return false; - } - if (name === "_all") { - return false; - } - let valid = true; - for (let i = 0; valid && i < FORBIDDEN_CHARS.length; i++) { - valid = !name.includes(FORBIDDEN_CHARS[i]); - } - return valid; -} -//# sourceMappingURL=elasticsearch.js.map \ No newline at end of file diff --git a/lib/service/storage/8/elasticsearch.js b/lib/service/storage/8/elasticsearch.js deleted file mode 100644 index e3efb3a9ae..0000000000 --- a/lib/service/storage/8/elasticsearch.js +++ /dev/null @@ -1,2922 +0,0 @@ -"use strict"; -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ES8 = void 0; -const lodash_1 = __importDefault(require("lodash")); -const sdk_es8_1 = require("sdk-es8"); -const assert_1 = __importDefault(require("assert")); -const ms_1 = __importDefault(require("ms")); -const bluebird_1 = __importDefault(require("bluebird")); -const semver_1 = __importDefault(require("semver")); -const debug_1 = __importDefault(require("../../../util/debug")); -const esWrapper_1 = __importDefault(require("./esWrapper")); -const queryTranslator_1 = __importDefault(require("../commons/queryTranslator")); -const didYouMean_1 = __importDefault(require("../../../util/didYouMean")); -const kerror = __importStar(require("../../../kerror")); -const requestAssertions_1 = require("../../../util/requestAssertions"); -const safeObject_1 = require("../../../util/safeObject"); -const storeScopeEnum_1 = __importDefault(require("../../../core/storage/storeScopeEnum")); -const extractFields_1 = __importDefault(require("../../../util/extractFields")); -const mutex_1 = require("../../../util/mutex"); -const name_generator_1 = require("../../../util/name-generator"); -(0, debug_1.default)("kuzzle:services:elasticsearch"); -const SCROLL_CACHE_PREFIX = "_docscroll_"; -const ROOT_MAPPING_PROPERTIES = [ - "properties", - "_meta", - "dynamic", - "dynamic_templates", -]; -const CHILD_MAPPING_PROPERTIES = ["type"]; -// Used for collection emulation -const HIDDEN_COLLECTION = "_kuzzle_keep"; -const ALIAS_PREFIX = "@"; // @todo next major release: Add ALIAS_PREFIX in FORBIDDEN_CHARS -const PRIVATE_PREFIX = "%"; -const PUBLIC_PREFIX = "&"; -const INDEX_PREFIX_POSITION_IN_INDICE = 0; -const INDEX_PREFIX_POSITION_IN_ALIAS = 1; -const NAME_SEPARATOR = "."; -const FORBIDDEN_CHARS = `\\/*?"<>| \t\r\n,+#:${NAME_SEPARATOR}${PUBLIC_PREFIX}${PRIVATE_PREFIX}`; -const DYNAMIC_PROPERTY_VALUES = ["true", "false", "strict"]; -// used to check whether we need to wait for ES to initialize or not -var esStateEnum; -(function (esStateEnum) { - esStateEnum[esStateEnum["AWAITING"] = 1] = "AWAITING"; - esStateEnum[esStateEnum["NONE"] = 2] = "NONE"; - esStateEnum[esStateEnum["OK"] = 3] = "OK"; -})(esStateEnum || (esStateEnum = {})); -let esState = esStateEnum.NONE; -/** - * @param {Kuzzle} kuzzle kuzzle instance - * @param {Object} config Service configuration - * @param {storeScopeEnum} scope - * @constructor - */ -class ES8 { - constructor(config, scope = storeScopeEnum_1.default.PUBLIC) { - this._config = config; - this._scope = scope; - this._indexPrefix = - scope === storeScopeEnum_1.default.PRIVATE ? 
PRIVATE_PREFIX : PUBLIC_PREFIX; - this._client = null; - this._esWrapper = null; - this._esVersion = null; - this._translator = new queryTranslator_1.default(); - // Allowed root key of a search query - this.searchBodyKeys = [ - "aggregations", - "aggs", - "collapse", - "explain", - "fields", - "from", - "highlight", - "query", - "search_after", - "search_timeout", - "size", - "sort", - "suggest", - "_name", - "_source", - "_source_excludes", - "_source_includes", - ]; - /** - * Only allow stored-scripts in queries - */ - this.scriptKeys = ["script", "_script"]; - this.scriptAllowedArgs = ["id", "params"]; - this.maxScrollDuration = this._loadMsConfig("maxScrollDuration"); - this.scrollTTL = this._loadMsConfig("defaults.scrollTTL"); - } - get scope() { - return this._scope; - } - /** - * Initializes the elasticsearch client - * - * @override - * @returns {Promise} - */ - async _initSequence() { - if (this._client) { - return; - } - if (global.NODE_ENV !== "development" && - this._config.commonMapping.dynamic === "true") { - global.kuzzle.log.warn([ - "Your dynamic mapping policy is set to 'true' for new fields.", - "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", - 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n")); - } - this._client = new sdk_es8_1.Client(this._config.client); - await this.waitForElasticsearch(); - this._esWrapper = new esWrapper_1.default(this._client); - const { version } = await this._client.info(); - if (version && !semver_1.default.satisfies(semver_1.default.coerce(version.number), "^8.0.0")) { - throw kerror.get("services", "storage", "version_mismatch", version.number); - } - this._esVersion = version; - } - /** - * Translate Koncorde filters to Elasticsearch query - * - * @param {Object} filters - Set of valid Koncorde filters - * @returns {Object} Equivalent Elasticsearch query - */ - translateKoncordeFilters(filters) { - return this._translator.translate(filters); - } - /** - * Returns some basic information about this service - * @override - * - * @returns {Promise.} service informations - */ - async info() { - const result = { - type: "elasticsearch", - version: this._esVersion, - }; - try { - const info = await this._client.info(); - result.version = info.version.number; - result.lucene = info.version.lucene_version; - const health = await this._client.cluster.health(); - result.status = health.status; - const stats = await this._client.cluster.stats({ human: true }); - result.spaceUsed = stats.indices.store.size; - result.nodes = stats.nodes; - return result; - } - catch (error) { - return this._esWrapper.reject(error); - } - } - /** - * Returns detailed multi-level storage stats data - * - * @returns {Promise.} - */ - async stats() { - const esRequest = { - metric: ["docs", "store"], - }; - const stats = await this._client.indices.stats(esRequest); - const indexes = {}; - let size = 0; - for (const [indice, indiceInfo] of Object.entries(stats.indices)) { - const infos = indiceInfo; - // Ignore non-Kuzzle indices - if (!indice.startsWith(PRIVATE_PREFIX) && - !indice.startsWith(PUBLIC_PREFIX)) { - continue; - } - const aliases = await this._getAliasFromIndice(indice); - const alias = aliases[0]; - const indexName = this._extractIndex(alias); - const collectionName = this._extractCollection(alias); - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || - collectionName === HIDDEN_COLLECTION) { - 
continue; - } - if (!indexes[indexName]) { - indexes[indexName] = { - collections: [], - name: indexName, - size: 0, - }; - } - indexes[indexName].collections.push({ - documentCount: infos.total.docs.count, - name: collectionName, - size: infos.total.store.size_in_bytes, - }); - indexes[indexName].size += infos.total.store.size_in_bytes; - size += infos.total.store.size_in_bytes; - } - return { - indexes: Object.values(indexes), - size, - }; - } - /** - * Scrolls results from previous elasticsearch query. - * Automatically clears the scroll context after the last result page has - * been fetched. - * - * @param {String} scrollId - Scroll identifier - * @param {Object} options - scrollTTL (default scrollTTL) - * - * @returns {Promise.<{ scrollId, hits, aggregations, total }>} - */ - async scroll(scrollId, { scrollTTL } = {}) { - const _scrollTTL = scrollTTL || this._config.defaults.scrollTTL; - const esRequest = { - scroll: _scrollTTL, - scroll_id: scrollId, - }; - const cacheKey = SCROLL_CACHE_PREFIX + global.kuzzle.hash(esRequest.scroll_id); - (0, debug_1.default)("Scroll: %o", esRequest); - if (_scrollTTL) { - const scrollDuration = (0, ms_1.default)(_scrollTTL); - if (scrollDuration > this.maxScrollDuration) { - throw kerror.get("services", "storage", "scroll_duration_too_great", _scrollTTL); - } - } - const stringifiedScrollInfo = await global.kuzzle.ask("core:cache:internal:get", cacheKey); - if (!stringifiedScrollInfo) { - throw kerror.get("services", "storage", "unknown_scroll_id"); - } - const scrollInfo = JSON.parse(stringifiedScrollInfo); - try { - const body = await this._client.scroll(esRequest); - const totalHitsValue = this._getHitsTotalValue(body.hits); - scrollInfo.fetched += body.hits.hits.length; - if (scrollInfo.fetched >= totalHitsValue) { - (0, debug_1.default)("Last scroll page fetched: deleting scroll %s", body._scroll_id); - await global.kuzzle.ask("core:cache:internal:del", cacheKey); - await this.clearScroll(body._scroll_id); - } - else { - await global.kuzzle.ask("core:cache:internal:store", cacheKey, JSON.stringify(scrollInfo), { - ttl: (0, ms_1.default)(_scrollTTL) || this.scrollTTL, - }); - } - const remaining = totalHitsValue - scrollInfo.fetched; - return await this._formatSearchResult(body, remaining, scrollInfo); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Searches documents from elasticsearch with a query - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} searchBody - Search request body (query, sort, etc.) 
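The scroll path above (and the search path that follows) normalizes `hits.total` through a helper, since with `track_total_hits` the ES 8 typings expose the total either as a plain number or as a `{ value, relation }` object. The helper itself is defined elsewhere in the file; the following is only an assumption of what such a normalization typically looks like:

import { estypes } from "@elastic/elasticsearch";

// Assumed shape of the normalization: accept both representations of
// hits.total and always return a number.
function hitsTotalValue(hits: { total?: estypes.SearchTotalHits | number }): number {
  if (typeof hits.total === "number") {
    return hits.total;
  }

  return hits.total?.value ?? 0;
}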
- * @param {Object} options - from (undefined), size (undefined), scroll (undefined) - * - * @returns {Promise.<{ scrollId, hits, aggregations, suggest, total }>} - */ - async search({ index, collection, searchBody, targets, } = {}, { from, size, scroll, } = {}) { - let esIndexes; - if (targets && targets.length > 0) { - const indexes = new Set(); - for (const target of targets) { - for (const targetCollection of target.collections) { - const alias = this._getAlias(target.index, targetCollection); - indexes.add(alias); - } - } - esIndexes = Array.from(indexes).join(","); - } - else { - esIndexes = this._getAlias(index, collection); - } - const esRequest = { - ...this._sanitizeSearchBody(searchBody), - from, - index: esIndexes, - scroll, - size, - track_total_hits: true, - }; - if (scroll) { - const scrollDuration = (0, ms_1.default)(scroll); - if (scrollDuration > this.maxScrollDuration) { - throw kerror.get("services", "storage", "scroll_duration_too_great", scroll); - } - } - (0, debug_1.default)("Search: %j", esRequest); - try { - const body = await this._client.search(esRequest); - const totalHitsValue = this._getHitsTotalValue(body.hits); - let remaining; - if (body._scroll_id) { - const ttl = (esRequest.scroll && (0, ms_1.default)(esRequest.scroll)) || - (0, ms_1.default)(this._config.defaults.scrollTTL); - await global.kuzzle.ask("core:cache:internal:store", SCROLL_CACHE_PREFIX + global.kuzzle.hash(body._scroll_id), JSON.stringify({ - collection, - fetched: body.hits.hits.length, - index, - targets, - }), { ttl }); - remaining = totalHitsValue - body.hits.hits.length; - } - return await this._formatSearchResult(body, remaining, { - collection, - index, - targets, - }); - } - catch (error) { - console.error(error); - throw this._esWrapper.formatESError(error); - } - } - /** - * Generate a map that associate an alias to a pair of index and collection - * - * @param {*} targets - * @returns - */ - _mapTargetsToAlias(targets) { - const aliasToTargets = {}; - for (const target of targets) { - for (const targetCollection of target.collections) { - const alias = this._getAlias(target.index, targetCollection); - if (!aliasToTargets[alias]) { - aliasToTargets[alias] = { - collection: targetCollection, - index: target.index, - }; - } - } - } - return aliasToTargets; - } - async _formatSearchResult(body, remaining, searchInfo = {}) { - let aliasToTargets = {}; - const aliasCache = new Map(); - if (searchInfo.targets) { - /** - * We need to map the alias to the target index and collection, - * so we can later retrieve informations about an index & collection - * based on its alias. - */ - aliasToTargets = this._mapTargetsToAlias(searchInfo.targets); - } - const formatHit = async (hit) => { - let index = searchInfo.index; - let collection = searchInfo.collection; - /** - * If the search has been done on multiple targets, we need to - * retrieve the appropriate index and collection based on the alias - */ - if (hit._index && searchInfo.targets) { - // Caching to reduce call to ES - let aliases = aliasCache.get(hit._index); - if (!aliases) { - // Retrieve all the alias associated to one index - aliases = await this._getAliasFromIndice(hit._index); - aliasCache.set(hit._index, aliases); - } - /** - * Since multiple alias can point to the same index in ES, we need to - * find the first alias that exists in the map of aliases associated - * to the targets. 
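// As `search` above shows, v8 requests are flat: the former `body` payload is
// spread at the top level and camelCase options such as `trackTotalHits` become
// snake_case. A reduced sketch of the same call shape (index name and query are
// placeholders; the scroll-id caching done above is omitted):
import { Client, estypes } from "@elastic/elasticsearch";

async function searchPage(client: Client, index: string) {
  const request: estypes.SearchRequest = {
    index,
    query: { match_all: {} },
    from: 0,
    size: 10,
    track_total_hits: true,
  };

  // The response is the body itself: hits, aggregations, _scroll_id, ...
  const body = await client.search(request);
  return body.hits.hits;
}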
- */ - const alias = aliases.find((_alias) => aliasToTargets[_alias]); - // Retrieve index and collection information based on the matching alias - index = aliasToTargets[alias].index; - collection = aliasToTargets[alias].collection; - } - return { - _id: hit._id, - _score: hit._score, - _source: hit._source, - collection, - highlight: hit.highlight, - index, - }; - }; - async function formatInnerHits(innerHits) { - if (!innerHits) { - return undefined; - } - const formattedInnerHits = {}; - for (const [name, innerHit] of Object.entries(innerHits)) { - formattedInnerHits[name] = await bluebird_1.default.map(innerHit.hits.hits, formatHit); - } - return formattedInnerHits; - } - const hits = await bluebird_1.default.map(body.hits.hits, async (hit) => ({ - inner_hits: await formatInnerHits(hit.inner_hits), - ...(await formatHit(hit)), - })); - return { - aggregations: body.aggregations, - hits, - remaining, - scrollId: body._scroll_id, - suggest: body.suggest, - total: body.hits.total.value, - }; - } - /** - * Gets the document with given ID - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document ID - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async get(index, collection, id) { - const esRequest = { - id, - index: this._getAlias(index, collection), - }; - // Just in case the user make a GET on url /mainindex/test/_search - // Without this test we return something weird: a result.hits.hits with all - // document without filter because the body is empty in HTTP by default - if (esRequest.id === "_search") { - return kerror.reject("services", "storage", "search_as_an_id"); - } - (0, debug_1.default)("Get document: %o", esRequest); - try { - const body = await this._client.get(esRequest); - return { - _id: body._id, - _source: body._source, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Returns the list of documents matching the ids given in the body param - * NB: Due to internal Kuzzle mechanism, can only be called on a single - * index/collection, using the body { ids: [.. } syntax. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Document IDs - * - * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} - */ - async mGet(index, collection, ids) { - if (ids.length === 0) { - return { errors: [], item: [] }; - } - const esRequest = { - docs: ids.map((_id) => ({ - _id, - _index: this._getAlias(index, collection), - })), - }; - (0, debug_1.default)("Multi-get documents: %o", esRequest); - let body; - try { - body = await this._client.mget(esRequest); // NOSONAR - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - const errors = []; - const items = []; - for (const doc of body.docs) { - if (!("error" in doc) && doc.found) { - items.push({ - _id: doc._id, - _source: doc._source, - _version: doc._version, - }); - } - else { - errors.push(doc._id); - } - } - return { errors, items }; - } - /** - * Counts how many documents match the filter given in body - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} searchBody - Search request body (query, sort, etc.) 
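// In `mGet` above, each entry of `body.docs` is a union of a result and an
// error variant in the v8 typings, hence the `!("error" in doc) && doc.found`
// guard. A compact sketch of that narrowing (index and ids are placeholders):
import { Client } from "@elastic/elasticsearch";

async function fetchByIds(client: Client, index: string, ids: string[]) {
  const body = await client.mget<Record<string, unknown>>({
    index,
    docs: ids.map((_id) => ({ _id })),
  });

  const found: Array<{ _id: string; _source?: Record<string, unknown> }> = [];
  const missing: string[] = [];

  for (const doc of body.docs) {
    // Narrow the result/error union before touching `found` or `_source`
    if (!("error" in doc) && doc.found) {
      found.push({ _id: doc._id, _source: doc._source });
    } else {
      missing.push(doc._id);
    }
  }

  return { found, missing };
}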
- * - * @returns {Promise.} count - */ - async count(index, collection, searchBody = {}) { - const esRequest = { - ...this._sanitizeSearchBody(searchBody), - index: this._getAlias(index, collection), - }; - (0, debug_1.default)("Count: %o", esRequest); - try { - const body = await this._client.count(esRequest); - return body.count; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends the new document to elasticsearch - * Cleans data to match elasticsearch specifications - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} content - Document content - * @param {Object} options - id (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { _id, _version, _source } - */ - async create(index, collection, content, { id, refresh, userId = null, injectKuzzleMeta = true, } = {}) { - (0, requestAssertions_1.assertIsObject)(content); - const esRequest = { - document: content, - id, - index: this._getAlias(index, collection), - op_type: id ? "create" : "index", - refresh, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - // Add metadata - if (injectKuzzleMeta) { - esRequest.document._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }; - } - (0, debug_1.default)("Create document: %o", esRequest); - try { - const body = await this._client.index(esRequest); - return { - _id: body._id, - _source: esRequest.document, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Creates a new document to Elasticsearch, or replace it if it already exist - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Document content - * @param {Object} options - refresh (undefined), userId (null), injectKuzzleMeta (true) - * - * @returns {Promise.} { _id, _version, _source, created } - */ - async createOrReplace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { - const esRequest = { - document: content, - id, - index: this._getAlias(index, collection), - refresh, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - // Add metadata - if (injectKuzzleMeta) { - esRequest.document._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - (0, debug_1.default)("Create or replace document: %o", esRequest); - try { - const body = await this._client.index(esRequest); - return { - _id: body._id, - _source: esRequest.document, - _version: body._version, - created: body.result === "created", // Needed by the notifier - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends the partial document to elasticsearch with the id to update - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Updated content - * @param {Object} options - refresh (undefined), userId (null), retryOnConflict (0) - * - * @returns {Promise.<{ _id, _version }>} - */ - async update(index, collection, id, content, { refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { - const esRequest = { - _source: true, - doc: content, - id, - index: this._getAlias(index, collection), - refresh, - retry_on_conflict: 
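// `create` and `createOrReplace` above use the v8 `index` API, where the payload
// moves from `body` to `document` and `op_type: "create"` enforces insert-only
// semantics. A trimmed sketch of that request shape (Kuzzle metadata injection
// is omitted; names are placeholders):
import { Client, estypes } from "@elastic/elasticsearch";

async function writeDocument(
  client: Client,
  index: string,
  id: string | undefined,
  content: Record<string, unknown>,
) {
  const request: estypes.IndexRequest<Record<string, unknown>> = {
    index,
    id,
    document: content,
    // "create" fails on duplicate ids; "index" overwrites (create-or-replace)
    op_type: id ? "create" : "index",
  };

  const body = await client.index(request);
  return { _id: body._id, _version: body._version, _source: content };
}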
retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - if (injectKuzzleMeta) { - // Add metadata - esRequest.doc._kuzzle_info = { - ...esRequest.doc._kuzzle_info, - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - (0, debug_1.default)("Update document: %o", esRequest); - try { - const body = await this._client.update(esRequest); - return { - _id: body._id, - _source: body.get._source, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends the partial document to elasticsearch with the id to update - * Creates the document if it doesn't already exist - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Updated content - * @param {Object} options - defaultValues ({}), refresh (undefined), userId (null), retryOnConflict (0) - * - * @returns {Promise.<{ _id, _version }>} - */ - async upsert(index, collection, id, content, { defaultValues = {}, refresh, userId = null, retryOnConflict, injectKuzzleMeta = true, } = {}) { - const esRequest = { - _source: true, - doc: content, - id, - index: this._getAlias(index, collection), - refresh, - retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, - upsert: { ...defaultValues, ...content }, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - // Add metadata - const user = getKuid(userId); - const now = Date.now(); - if (injectKuzzleMeta) { - esRequest.doc._kuzzle_info = { - ...esRequest.doc._kuzzle_info, - updatedAt: now, - updater: user, - }; - esRequest.upsert._kuzzle_info = { - ...esRequest.upsert._kuzzle_info, - author: user, - createdAt: now, - }; - } - (0, debug_1.default)("Upsert document: %o", esRequest); - try { - const body = await this._client.update(esRequest); - return { - _id: body._id, - _source: body.get._source, - _version: body._version, - created: body.result === "created", - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Replaces a document to Elasticsearch - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} content - Document content - * @param {Object} options - refresh (undefined), userId (null) - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async replace(index, collection, id, content, { refresh, userId = null, injectKuzzleMeta = true, } = {}) { - const alias = this._getAlias(index, collection); - const esRequest = { - document: content, - id, - index: alias, - refresh, - }; - assertNoRouting(esRequest); - assertWellFormedRefresh(esRequest); - if (injectKuzzleMeta) { - // Add metadata - esRequest.document._kuzzle_info = { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: Date.now(), - updater: getKuid(userId), - }; - } - try { - const exists = await this._client.exists({ id, index: alias }); - if (!exists) { - throw kerror.get("services", "storage", "not_found", id, index, collection); - } - (0, debug_1.default)("Replace document: %o", esRequest); - const body = await this._client.index(esRequest); - return { - _id: id, - _source: esRequest.document, - _version: body._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Sends to elasticsearch the document id to delete - * - * @param {String} 
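// `update` and `upsert` above ask Elasticsearch to return the post-update source
// by setting `_source: true`, then read it back from `body.get`. A minimal
// sketch of that round-trip (the retry count and function name are illustrative):
import { Client } from "@elastic/elasticsearch";

async function partialUpdate(
  client: Client,
  index: string,
  id: string,
  changes: Record<string, unknown>,
) {
  const body = await client.update<Record<string, unknown>, Record<string, unknown>>({
    index,
    id,
    doc: changes,
    upsert: changes, // used as the initial document if `id` does not exist
    _source: true, // makes ES return the updated source in `body.get`
    retry_on_conflict: 3,
  });

  return {
    _id: body._id,
    _version: body._version,
    _source: body.get?._source,
    created: body.result === "created",
  };
}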
index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Object} options - refresh (undefined) - * - * @returns {Promise} - */ - async delete(index, collection, id, { refresh, } = {}) { - const esRequest = { - id, - index: this._getAlias(index, collection), - refresh, - }; - assertWellFormedRefresh(esRequest); - (0, debug_1.default)("Delete document: %o", esRequest); - try { - await this._client.delete(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - return null; - } - /** - * Deletes all documents matching the provided filters. - * If fetch=false, the max documents write limit is not applied. - * - * Options: - * - size: size of the batch to retrieve documents (no-op if fetch=false) - * - refresh: refresh option for ES - * - fetch: if true, will fetch the documents before delete them - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} options - size (undefined), refresh (undefined), fetch (true) - * - * @returns {Promise.<{ documents, total, deleted, failures: Array<{ id, reason }> }>} - */ - async deleteByQuery(index, collection, query, { refresh, size = 1000, fetch = true, } = {}) { - const esRequest = { - ...this._sanitizeSearchBody({ query }), - index: this._getAlias(index, collection), - scroll: "5s", - }; - if (!(0, safeObject_1.isPlainObject)(query)) { - throw kerror.get("services", "storage", "missing_argument", "body.query"); - } - try { - let documents = []; - if (fetch) { - documents = await this._getAllDocumentsFromQuery({ - ...esRequest, - size, - }); - } - (0, debug_1.default)("Delete by query: %o", esRequest); - esRequest.refresh = refresh === "wait_for" ? 
true : refresh; - const request = { - ...esRequest, - max_docs: size, - }; - if (request.max_docs === -1) { - request.max_docs = undefined; - } - const body = await this._client.deleteByQuery(request); - return { - deleted: body.deleted, - documents, - failures: body.failures.map(({ id, cause }) => ({ - id, - reason: cause.reason, - })), - total: body.total, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Delete fields of a document and replace it - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document id - * @param {Array} fields - Document fields to be removed - * @param {Object} options - refresh (undefined), userId (null) - * - * @returns {Promise.<{ _id, _version, _source }>} - */ - async deleteFields(index, collection, id, fields, { refresh, userId = null, } = {}) { - const alias = this._getAlias(index, collection); - const esRequest = { - id, - index: alias, - }; - try { - (0, debug_1.default)("DeleteFields document: %o", esRequest); - const body = await this._client.get(esRequest); - for (const field of fields) { - if (lodash_1.default.has(body._source, field)) { - lodash_1.default.set(body._source, field, undefined); - } - } - const updatedInfos = { - updatedAt: Date.now(), - updater: getKuid(userId), - }; - if (typeof body._source._kuzzle_info === "object") { - body._source._kuzzle_info = { - ...body._source._kuzzle_info, - ...updatedInfos, - }; - } - else { - body._source._kuzzle_info = updatedInfos; - } - const newEsRequest = { - document: body._source, - id, - index: alias, - refresh, - }; - assertNoRouting(newEsRequest); - assertWellFormedRefresh(newEsRequest); - const updated = await this._client.index(newEsRequest); - return { - _id: id, - _source: body._source, - _version: updated._version, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Updates all documents matching the provided filters - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} changes - Changes wanted on documents - * @param {Object} options - refresh (undefined), size (undefined) - * - * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} - */ - async updateByQuery(index, collection, query, changes, { refresh, size = 1000, userId = null, } = {}) { - try { - const esRequest = { - ...this._sanitizeSearchBody({ query }), - index: this._getAlias(index, collection), - scroll: "5s", - size, - }; - const documents = await this._getAllDocumentsFromQuery(esRequest); - for (const document of documents) { - document._source = undefined; - document.body = changes; - } - (0, debug_1.default)("Update by query: %o", esRequest); - const { errors, items } = await this.mUpdate(index, collection, documents, { refresh, userId }); - return { - errors, - successes: items, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Updates all documents matching the provided filters - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Object} changes - Changes wanted on documents - * @param {Object} options - refresh (undefined) - * - * @returns {Promise.<{ successes: [_id, _source, _status], errors: [ document, status, reason ] }>} - */ - async bulkUpdateByQuery(index, collection, query, 
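// `deleteByQuery` above also shows two v8 details: `_delete_by_query` only
// accepts a boolean `refresh` (so "wait_for" is coerced to `true`), and the
// batch limit is expressed through `max_docs`. A reduced sketch (query and
// limit are placeholders):
import { Client, estypes } from "@elastic/elasticsearch";

async function deleteMatching(
  client: Client,
  index: string,
  query: estypes.QueryDslQueryContainer,
  refresh?: boolean | "wait_for",
) {
  const body = await client.deleteByQuery({
    index,
    query,
    max_docs: 1000,
    refresh: refresh === "wait_for" ? true : refresh,
  });

  return {
    deleted: body.deleted,
    total: body.total,
    failures: (body.failures ?? []).map(({ id, cause }) => ({
      id,
      reason: cause.reason,
    })),
  };
}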
changes, { refresh = false, } = {}) { - const script = { - params: {}, - source: "", - }; - const flatChanges = (0, extractFields_1.default)(changes, { alsoExtractValues: true }); - for (const { key, value } of flatChanges) { - script.source += `ctx._source.${key} = params['${key}'];`; - script.params[key] = value; - } - const esRequest = { - index: this._getAlias(index, collection), - query: this._sanitizeSearchBody({ query }).query, - refresh, - script, - }; - (0, debug_1.default)("Bulk Update by query: %o", esRequest); - let response; - try { - response = await this._client.updateByQuery(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - if (response.failures.length) { - const errors = response.failures.map(({ id, cause }) => ({ - cause, - id, - })); - throw kerror.get("services", "storage", "incomplete_update", response.updated, errors); - } - return { - updated: response.updated, - }; - } - /** - * Execute the callback with a batch of documents of specified size until all - * documents matched by the query have been processed. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} query - Query to match documents - * @param {Function} callback - callback that will be called with the "hits" array - * @param {Object} options - size (10), scrollTTL ('5s') - * - * @returns {Promise.} Array of results returned by the callback - */ - async mExecute(index, collection, query, callback, { size = 10, scrollTTl = "5s", } = {}) { - const esRequest = { - ...this._sanitizeSearchBody({ query }), - from: 0, - index: this._getAlias(index, collection), - scroll: scrollTTl, - size, - }; - if (!(0, safeObject_1.isPlainObject)(query)) { - throw kerror.get("services", "storage", "missing_argument", "body.query"); - } - const results = []; - let processed = 0; - let scrollId = null; - try { - let body = await this._client.search(esRequest); - const totalHitsValue = this._getHitsTotalValue(body.hits); - while (processed < totalHitsValue && body.hits.hits.length > 0) { - scrollId = body._scroll_id; - results.push(await callback(body.hits.hits)); - processed += body.hits.hits.length; - body = await this._client.scroll({ - scroll: esRequest.scroll, - scroll_id: scrollId, - }); - } - } - finally { - await this.clearScroll(scrollId); - } - return results; - } - /** - * Creates a new index. - * - * This methods creates an hidden collection in the provided index to be - * able to list it. - * This methods resolves if the index name does not already exists either as - * private or public index. - * - * @param {String} index - Index name - * - * @returns {Promise} - */ - async createIndex(index) { - this._assertValidIndexAndCollection(index); - let body; - try { - body = await this._client.cat.aliases({ format: "json" }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias: name }) => name); - for (const alias of aliases) { - const indexName = this._extractIndex(alias); - if (index === indexName) { - const indexType = alias[INDEX_PREFIX_POSITION_IN_ALIAS] === PRIVATE_PREFIX - ? "private" - : "public"; - throw kerror.get("services", "storage", "index_already_exists", indexType, index); - } - } - await this._createHiddenCollection(index); - return null; - } - /** - * Creates an empty collection. - * Mappings and settings will be applied if supplied. 
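// `bulkUpdateByQuery` above flattens the requested changes into a painless
// script and issues a single `updateByQuery`. A cut-down sketch of that idea
// (field flattening is simplified to one level; names are placeholders):
import { Client } from "@elastic/elasticsearch";

async function setFieldsByQuery(
  client: Client,
  index: string,
  changes: Record<string, unknown>,
) {
  const script = { params: {} as Record<string, unknown>, source: "" };

  for (const [key, value] of Object.entries(changes)) {
    script.source += `ctx._source.${key} = params['${key}'];`;
    script.params[key] = value;
  }

  const body = await client.updateByQuery({
    index,
    query: { match_all: {} },
    refresh: true,
    script,
  });

  return { failures: body.failures ?? [], updated: body.updated };
}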
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} config - mappings ({}), settings ({}) - * - * @returns {Promise} - */ - async createCollection(index, collection, { mappings = {}, settings = {}, } = {}) { - this._assertValidIndexAndCollection(index, collection); - if (collection === HIDDEN_COLLECTION) { - throw kerror.get("services", "storage", "collection_reserved", HIDDEN_COLLECTION); - } - const mutex = new mutex_1.Mutex(`hiddenCollection/create/${index}`); - try { - await mutex.lock(); - if (await this._hasHiddenCollection(index)) { - await this.deleteCollection(index, HIDDEN_COLLECTION); - } - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - finally { - await mutex.unlock(); - } - const esRequest = { - aliases: { - [this._getAlias(index, collection)]: {}, - }, - index: await this._getAvailableIndice(index, collection), - mappings: {}, - settings, - wait_for_active_shards: await this._getWaitForActiveShards(), - }; - this._checkDynamicProperty(mappings); - const exists = await this.hasCollection(index, collection); - if (exists) { - return this.updateCollection(index, collection, { mappings, settings }); - } - this._checkMappings(mappings); - esRequest.mappings = { - _meta: mappings._meta || this._config.commonMapping._meta, - dynamic: mappings.dynamic || this._config.commonMapping.dynamic, - properties: lodash_1.default.merge(mappings.properties, this._config.commonMapping.properties), - }; - esRequest.settings.number_of_replicas = - esRequest.settings.number_of_replicas || - this._config.defaultSettings.number_of_replicas; - esRequest.settings.number_of_shards = - esRequest.settings.number_of_shards || - this._config.defaultSettings.number_of_shards; - try { - await this._client.indices.create(esRequest); - } - catch (error) { - if (lodash_1.default.get(error, "meta.body.error.type") === - "resource_already_exists_exception") { - // race condition: the indice has been created between the "exists" - // check above and this "create" attempt - return null; - } - throw this._esWrapper.formatESError(error); - } - return null; - } - /** - * Retrieves settings definition for index/type - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.<{ settings }>} - */ - async getSettings(index, collection) { - const indice = await this._getIndice(index, collection); - const esRequest = { - index: indice, - }; - (0, debug_1.default)("Get settings: %o", esRequest); - try { - const body = await this._client.indices.getSettings(esRequest); - return body[indice].settings.index; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Retrieves mapping definition for index/type - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} options - includeKuzzleMeta (false) - * - * @returns {Promise.<{ dynamic, _meta, properties }>} - */ - async getMapping(index, collection, { includeKuzzleMeta = false, } = {}) { - const indice = await this._getIndice(index, collection); - const esRequest = { - index: indice, - }; - (0, debug_1.default)("Get mapping: %o", esRequest); - try { - const body = await this._client.indices.getMapping(esRequest); - const properties = includeKuzzleMeta - ? 
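// `createCollection` above builds a flat `indices.create` request (aliases,
// mappings and settings at the top level) and tolerates the
// `resource_already_exists_exception` race between two nodes. A minimal sketch
// of that pattern (indice and alias names are placeholders):
import { Client, estypes } from "@elastic/elasticsearch";

async function createIndiceWithAlias(
  client: Client,
  indice: string,
  alias: string,
  mappings: estypes.MappingTypeMapping,
  settings: estypes.IndicesIndexSettings,
) {
  try {
    await client.indices.create({
      index: indice,
      aliases: { [alias]: {} },
      mappings,
      settings,
      wait_for_active_shards: 1,
    });
  } catch (error: any) {
    // Race condition: the indice was created between the existence check and
    // this call; anything else is rethrown.
    if (error?.meta?.body?.error?.type !== "resource_already_exists_exception") {
      throw error;
    }
  }
}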
body[indice].mappings.properties - : lodash_1.default.omit(body[indice].mappings.properties, "_kuzzle_info"); - return { - _meta: body[indice].mappings._meta, - dynamic: body[indice].mappings.dynamic, - properties, - }; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Updates a collection mappings and settings - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} config - mappings ({}), settings ({}) - * - * @returns {Promise} - */ - async updateCollection(index, collection, { mappings = {}, settings = {}, } = {}) { - const esRequest = { - index: await this._getIndice(index, collection), - }; - // If either the putMappings or the putSettings operation fail, we need to - // rollback the whole operation. Since mappings can't be rollback, we try to - // update the settings first, then the mappings and we rollback the settings - // if putMappings fail. - let indexSettings; - try { - indexSettings = await this._getSettings(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - if (!lodash_1.default.isEmpty(settings)) { - await this.updateSettings(index, collection, settings); - } - try { - if (!lodash_1.default.isEmpty(mappings)) { - const previousMappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - await this.updateMapping(index, collection, mappings); - if (this._dynamicChanges(previousMappings, mappings)) { - await this.updateSearchIndex(index, collection); - } - } - } - catch (error) { - const allowedSettings = this.getAllowedIndexSettings(indexSettings); - // Rollback to previous settings - if (!lodash_1.default.isEmpty(settings)) { - await this.updateSettings(index, collection, allowedSettings); - } - throw error; - } - return null; - } - /** - * Given index settings we return a new version of index settings - * only with allowed settings that can be set (during update or create index). - * @param indexSettings the index settings - * @returns {{index: *}} a new index settings with only allowed settings. - */ - getAllowedIndexSettings(indexSettings) { - return { - index: lodash_1.default.omit(indexSettings.index, [ - "creation_date", - "provided_name", - "uuid", - "version", - ]), - }; - } - /** - * Sends an empty UpdateByQuery request to update the search index - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @returns {Promise.} {} - */ - async updateSearchIndex(index, collection) { - const esRequest = { - // @cluster: conflicts when two nodes start at the same time - conflicts: "proceed", - index: this._getAlias(index, collection), - refresh: true, - // This operation can take some time: this should be an ES - // background task. And it's preferable to a request timeout when - // processing large indexes. 
- wait_for_completion: false, - }; - (0, debug_1.default)("UpdateByQuery: %o", esRequest); - try { - await this._client.updateByQuery(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Update a collection mappings - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} mappings - Collection mappings in ES format - * - * @returns {Promise.<{ dynamic, _meta, properties }>} - */ - async updateMapping(index, collection, mappings = {}) { - let esRequest = { - index: this._getAlias(index, collection), - }; - this._checkDynamicProperty(mappings); - const collectionMappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - this._checkMappings(mappings); - esRequest = { - ...esRequest, - _meta: mappings._meta || collectionMappings._meta, - dynamic: mappings.dynamic || collectionMappings.dynamic, - properties: mappings.properties, - }; - (0, debug_1.default)("Update mapping: %o", esRequest); - try { - await this._client.indices.putMapping(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const fullProperties = lodash_1.default.merge(collectionMappings.properties, mappings.properties); - return { - _meta: esRequest._meta, - dynamic: esRequest.dynamic.toString(), - properties: fullProperties, - }; - } - /** - * Updates a collection settings (eg: analyzers) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object} settings - Collection settings in ES format - * - * @returns {Promise} - */ - async updateSettings(index, collection, settings = {}) { - const esRequest = { - index: this._getAlias(index, collection), - }; - await this._client.indices.close(esRequest); - try { - await this._client.indices.putSettings({ ...esRequest, body: settings }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - finally { - await this._client.indices.open(esRequest); - } - return null; - } - /** - * Empties the content of a collection. Keep the existing mapping and settings. 
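// `updateSettings` above closes the index before pushing settings and reopens
// it afterwards, since some settings (analyzers in particular) can only be
// applied to a closed index. A small sketch of that sequence (whether the
// payload goes under `settings` or the legacy `body` key depends on the exact
// 8.x client version; `settings` is assumed here):
import { Client, estypes } from "@elastic/elasticsearch";

async function applySettings(
  client: Client,
  index: string,
  settings: estypes.IndicesIndexSettings,
) {
  await client.indices.close({ index });

  try {
    await client.indices.putSettings({ index, settings });
  } finally {
    // Always reopen, even if the settings update failed
    await client.indices.open({ index });
  }
}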
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise} - */ - async truncateCollection(index, collection) { - let mappings; - let settings; - const esRequest = { - index: await this._getIndice(index, collection), - }; - try { - mappings = await this.getMapping(index, collection, { - includeKuzzleMeta: true, - }); - settings = await this._getSettings(esRequest); - settings = { - ...settings, - ...this.getAllowedIndexSettings(settings), - }; - await this._client.indices.delete(esRequest); - await this._client.indices.create({ - ...esRequest, - aliases: { - [this._getAlias(index, collection)]: {}, - }, - mappings, - settings, - wait_for_active_shards: await this._getWaitForActiveShards(), - }); - return null; - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Runs several action and document - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents to import - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.<{ items, errors }> - */ - async import(index, collection, documents, { refresh, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection); - const dateNow = Date.now(); - const esRequest = { - operations: documents, - refresh, - timeout, - }; - const kuzzleMeta = { - created: { - author: getKuid(userId), - createdAt: dateNow, - updatedAt: null, - updater: null, - }, - updated: { - updatedAt: dateNow, - updater: getKuid(userId), - }, - }; - assertWellFormedRefresh(esRequest); - this._scriptCheck(documents); - this._setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta); - let body; - try { - body = await this._client.bulk(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const result = { - errors: [], - items: [], - }; - let idx = 0; - /** - * @warning Critical code section - * - * bulk body can contain more than 10K elements - */ - for (let i = 0; i < body.items.length; i++) { - const row = body.items[i]; - const action = Object.keys(row)[0]; - const item = row[action]; - if (item.status >= 400) { - const error = { - _id: item._id, - status: item.status, - }; - // update action contain body in "doc" field - // the delete action is not followed by an action payload - if (action === "update") { - error._source = documents[idx + 1].doc; - error._source._kuzzle_info = undefined; - } - else if (action !== "delete") { - error._source = documents[idx + 1]; - error._source._kuzzle_info = undefined; - } - // ES response does not systematicaly include an error object - // (e.g. delete action with 404 status) - if (item.error) { - error.error = { - reason: item.error.reason, - type: item.error.type, - }; - } - result.errors.push({ [action]: error }); - } - else { - result.items.push({ - [action]: { - _id: item._id, - status: item.status, - }, - }); - } - // the delete action is not followed by an action payload - idx = action === "delete" ? 
idx + 1 : idx + 2; - } - /* end critical code section */ - return result; - } - /** - * Retrieves the complete list of existing collections in the current index - * - * @param {String} index - Index name - * @param {Object.Boolean} includeHidden - Optional: include HIDDEN_COLLECTION in results - * - * @returns {Promise.} Collection names - */ - async listCollections(index, { includeHidden = false } = {}) { - let body; - try { - body = await this._client.cat.aliases({ format: "json" }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias }) => alias); - const schema = this._extractSchema(aliases, { includeHidden }); - return schema[index] || []; - } - /** - * Retrieves the complete list of indexes - * - * @returns {Promise.} Index names - */ - async listIndexes() { - let body; - try { - body = await this._client.cat.aliases({ format: "json" }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias }) => alias); - const schema = this._extractSchema(aliases); - return Object.keys(schema); - } - /** - * Returns an object containing the list of indexes and collections - * - * @returns {Object.} Object - */ - async getSchema() { - let body; - try { - body = await this._client.cat.aliases({ format: "json" }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = body.map(({ alias }) => alias); - const schema = this._extractSchema(aliases, { includeHidden: true }); - for (const [index, collections] of Object.entries(schema)) { - schema[index] = collections.filter((c) => c !== HIDDEN_COLLECTION); - } - return schema; - } - /** - * Retrieves the complete list of aliases - * - * @returns {Promise.} [ { alias, index, collection, indice } ] - */ - async listAliases() { - let body; - try { - body = await this._client.cat.aliases({ format: "json" }); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - const aliases = []; - for (const { alias, index: indice } of body) { - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix) { - aliases.push({ - alias, - collection: this._extractCollection(alias), - index: this._extractIndex(alias), - indice, - }); - } - } - return aliases; - } - /** - * Deletes a collection - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise} - */ - async deleteCollection(index, collection) { - const indice = await this._getIndice(index, collection); - const esRequest = { - index: indice, - }; - try { - await this._client.indices.delete(esRequest); - const alias = this._getAlias(index, collection); - if (await this._checkIfAliasExists(alias)) { - await this._client.indices.deleteAlias({ - index: indice, - name: alias, - }); - } - await this._createHiddenCollection(index); - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - return null; - } - /** - * Deletes multiple indexes - * - * @param {String[]} indexes - Index names - * - * @returns {Promise.} - */ - async deleteIndexes(indexes = []) { - if (indexes.length === 0) { - return bluebird_1.default.resolve([]); - } - const deleted = new Set(); - try { - const body = await this._client.cat.aliases({ format: "json" }); - const esRequest = body.reduce((request, { alias, index: indice }) => { - const index = this._extractIndex(alias); - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] !== this._indexPrefix || - !indexes.includes(index)) { - return request; - } - deleted.add(index); - 
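// `import` above (like the m* helpers further down) drives the v8 `bulk` API,
// where the flat action/payload array is passed as `operations` and each
// response item has to be inspected per action. A reduced sketch (documents
// and refresh policy are placeholders):
import { Client } from "@elastic/elasticsearch";

async function bulkIndex(
  client: Client,
  index: string,
  documents: Array<{ _id?: string; body: Record<string, unknown> }>,
) {
  const operations: Array<Record<string, unknown>> = [];

  for (const doc of documents) {
    operations.push({ index: { _id: doc._id, _index: index } });
    operations.push(doc.body);
  }

  const body = await client.bulk({ operations, refresh: "wait_for" });

  const errors: Array<{ _id?: string | null; reason?: string; status: number }> = [];
  for (const row of body.items) {
    const action = Object.keys(row)[0] as keyof typeof row;
    const item = row[action];

    if (item && item.status >= 400) {
      errors.push({ _id: item._id, reason: item.error?.reason, status: item.status });
    }
  }

  return { errors, itemCount: body.items.length };
}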
request.index.push(indice); - return request; - }, { index: [] }); - if (esRequest.index.length === 0) { - return []; - } - (0, debug_1.default)("Delete indexes: %o", esRequest); - await this._client.indices.delete(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - return Array.from(deleted); - } - /** - * Deletes an index - * - * @param {String} index - Index name - * - * @returns {Promise} - */ - async deleteIndex(index) { - await this.deleteIndexes([index]); - return null; - } - /** - * Forces a refresh on the collection. - * - * /!\ Can lead to some performance issues. - * cf https://www.elastic.co/guide/en/elasticsearch/guide/current/near-real-time.html for more details - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.} { _shards } - */ - async refreshCollection(index, collection) { - const esRequest = { - index: this._getAlias(index, collection), - }; - let body; - try { - body = await this._client.indices.refresh(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - return body; - } - /** - * Returns true if the document exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {String} id - Document ID - * - * @returns {Promise.} - */ - async exists(index, collection, id) { - const esRequest = { - id, - index: this._getAlias(index, collection), - }; - try { - return await this._client.exists(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Returns the list of documents existing with the ids given in the body param - * NB: Due to internal Kuzzle mechanism, can only be called on a single - * index/collection, using the body { ids: [.. } syntax. 
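// `exists` above, and the listing helpers before it, lean on two more v8
// simplifications: existence checks resolve to a plain boolean, and
// `cat.aliases({ format: "json" })` resolves to an array of records. A small
// sketch (the Kuzzle-specific alias parsing is intentionally left out):
import { Client } from "@elastic/elasticsearch";

async function listAliasNames(client: Client): Promise<string[]> {
  // Resolves directly to an array of alias records
  const records = await client.cat.aliases({ format: "json" });
  return records.map((record) => record.alias ?? "");
}

async function documentExists(client: Client, index: string, id: string): Promise<boolean> {
  // No ApiResponse wrapper: exists() resolves to the boolean itself
  return client.exists({ id, index });
}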
- * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Document IDs - * - * @returns {Promise.<{ items: Array<{ _id, _source, _version }>, errors }>} - */ - async mExists(index, collection, ids) { - if (ids.length === 0) { - return { errors: [], item: [] }; - } - const esRequest = { - _source: "false", - docs: ids.map((_id) => ({ _id })), - index: this._getAlias(index, collection), - }; - (0, debug_1.default)("mExists: %o", esRequest); - let body; - try { - body = await this._client.mget(esRequest); // NOSONAR - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - const errors = []; - const items = []; - for (let i = 0; i < body.docs.length; i++) { - const doc = body.docs[i]; - if (!("error" in doc) && doc.found) { - items.push(doc._id); - } - else { - errors.push(doc._id); - } - } - return { errors, items }; - } - /** - * Returns true if the index exists - * - * @param {String} index - Index name - * - * @returns {Promise.} - */ - async hasIndex(index) { - const indexes = await this.listIndexes(); - return indexes.some((idx) => idx === index); - } - /** - * Returns true if the collection exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * - * @returns {Promise.} - */ - async hasCollection(index, collection) { - const collections = await this.listCollections(index); - return collections.some((col) => col === collection); - } - /** - * Returns true if the index has the hidden collection - * - * @param {String} index - Index name - * - * @returns {Promise.} - */ - async _hasHiddenCollection(index) { - const collections = await this.listCollections(index, { - includeHidden: true, - }); - return collections.some((col) => col === HIDDEN_COLLECTION); - } - /** - * Creates multiple documents at once. - * If a content has no id, one is automatically generated and assigned to it. - * If a content has a specified identifier, it is rejected if it already exists - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mCreate(index, collection, documents, { refresh, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection), kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { prepareMGet: true }); - // prepare the mget request, but only for document having a specified id - const body = documentsToGet.length > 0 - ? 
await this._client.mget({ - docs: documentsToGet, - index: alias, - }) - : { docs: [] }; - const existingDocuments = body.docs; - const esRequest = { - index: alias, - operations: [], - refresh, - timeout, - }; - const toImport = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0, idx = 0; i < extractedDocuments.length; i++) { - const document = extractedDocuments[i]; - // Documents are retrieved in the same order than we got them from user - if (typeof document._id === "string" && existingDocuments[idx]) { - const doc = existingDocuments[idx]; - if (!("error" in doc) && doc.found) { - document._source._kuzzle_info = undefined; - rejected.push({ - document: { - _id: document._id, - body: document._source, - }, - reason: "document already exists", - status: 400, - }); - } - else { - esRequest.operations.push({ - index: { - _id: document._id, - _index: alias, - }, - }); - esRequest.operations.push(document._source); - toImport.push(document); - } - idx++; - } - else { - esRequest.operations.push({ index: { _index: alias } }); - esRequest.operations.push(document._source); - toImport.push(document); - } - } - /* end critical code section */ - return this._mExecute(esRequest, toImport, rejected); - } - /** - * Creates or replaces multiple documents at once. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null), injectKuzzleMeta (false), limits (true) - * - * @returns {Promise.<{ items, errors }> - */ - async mCreateOrReplace(index, collection, documents, { refresh, timeout, userId = null, injectKuzzleMeta = true, limits = true, source = true, } = {}) { - let kuzzleMeta = {}; - if (injectKuzzleMeta) { - kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }; - } - const alias = this._getAlias(index, collection); - const esRequest = { - index: alias, - operations: [], - refresh, - timeout, - }; - const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); - esRequest.operations = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.operations.push({ - index: { - _id: extractedDocuments[i]._id, - _index: alias, - }, - }); - esRequest.operations.push(extractedDocuments[i]._source); - } - /* end critical code section */ - return this._mExecute(esRequest, extractedDocuments, rejected, { - limits, - source, - }); - } - /** - * Updates multiple documents with one request - * Replacements are rejected if targeted documents do not exist - * (like with the normal "update" method) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), retryOnConflict (0), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mUpdate(index, collection, documents, { refresh = undefined, retryOnConflict = 0, timeout = undefined, userId = null, } = {}) { - const alias = this._getAlias(index, collection), toImport = [], esRequest = { - index: alias, - operations: [], - refresh, - timeout, - }, kuzzleMeta = { - _kuzzle_info: { - updatedAt: Date.now(), - updater: getKuid(userId), - }, - }, { rejected, 
extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta); - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - const extractedDocument = extractedDocuments[i]; - if (typeof extractedDocument._id === "string") { - esRequest.operations.push({ - update: { - _id: extractedDocument._id, - _index: alias, - retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }, - }); - // _source: true => makes ES return the updated document source in the - // response. Required by the real-time notifier component - esRequest.operations.push({ - _source: true, - doc: extractedDocument._source, - }); - toImport.push(extractedDocument); - } - else { - extractedDocument._source._kuzzle_info = undefined; - rejected.push({ - document: { - _id: extractedDocument._id, - body: extractedDocument._source, - }, - reason: "document _id must be a string", - status: 400, - }); - } - } - /* end critical code section */ - const response = await this._mExecute(esRequest, toImport, rejected); - // with _source: true, ES returns the updated document in - // response.result.get._source - // => we replace response.result._source with it so that the notifier - // module can seamlessly process all kind of m* response* - response.items = response.items.map((item) => ({ - _id: item._id, - _source: item.get._source, - _version: item._version, - status: item.status, - })); - return response; - } - /** - * Creates or replaces multiple documents at once. - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - refresh (undefined), retryOnConflict (0), timeout (undefined), userId (null) - * - * @returns {Promise.<{ items, errors }> - */ - async mUpsert(index, collection, documents, { refresh, retryOnConflict = 0, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection); - const esRequest = { - operations: [], - refresh, - timeout, - }; - const user = getKuid(userId); - const now = Date.now(); - const kuzzleMeta = { - doc: { - _kuzzle_info: { - updatedAt: now, - updater: user, - }, - }, - upsert: { - _kuzzle_info: { - author: user, - createdAt: now, - }, - }, - }; - const { rejected, extractedDocuments } = this._extractMDocuments(documents, kuzzleMeta, { - prepareMUpsert: true, - requireId: true, - }); - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - esRequest.operations.push({ - update: { - _id: extractedDocuments[i]._id, - _index: alias, - _source: true, - retry_on_conflict: retryOnConflict || this._config.defaults.onUpdateConflictRetries, - }, - }, { - doc: extractedDocuments[i]._source.changes, - upsert: extractedDocuments[i]._source.default, - }); - // _source: true - // Makes ES return the updated document source in the response. 
- // Required by the real-time notifier component - } - /* end critical code section */ - const response = await this._mExecute(esRequest, extractedDocuments, rejected); - // with _source: true, ES returns the updated document in - // response.result.get._source - // => we replace response.result._source with it so that the notifier - // module can seamlessly process all kind of m* response* - response.items = response.items.map((item) => ({ - _id: item._id, - _source: item.get._source, - _version: item._version, - created: item.result === "created", // Needed by the notifier - status: item.status, - })); - return response; - } - /** - * Replaces multiple documents at once. - * Replacements are rejected if targeted documents do not exist - * (like with the normal "replace" method) - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Object[]} documents - Documents - * @param {Object} options - timeout (undefined), refresh (undefined), userId (null) - * - * @returns {Promise.} { items, errors } - */ - async mReplace(index, collection, documents, { refresh, timeout, userId = null, } = {}) { - const alias = this._getAlias(index, collection), kuzzleMeta = { - _kuzzle_info: { - author: getKuid(userId), - createdAt: Date.now(), - updatedAt: null, - updater: null, - }, - }, { rejected, extractedDocuments, documentsToGet } = this._extractMDocuments(documents, kuzzleMeta, { - prepareMGet: true, - requireId: true, - }); - if (documentsToGet.length < 1) { - return { errors: rejected, items: [] }; - } - const body = await this._client.mget({ - docs: documentsToGet, - index: alias, - }); - const existingDocuments = body.docs; - const esRequest = { - operations: [], - refresh, - timeout, - }; - const toImport = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < extractedDocuments.length; i++) { - const document = extractedDocuments[i]; - // Documents are retrieved in the same order than we got them from user - const doc = existingDocuments[i]; - if (!("error" in doc) && doc?.found) { - esRequest.operations.push({ - index: { - _id: document._id, - _index: alias, - }, - }); - esRequest.operations.push(document._source); - toImport.push(document); - } - else { - document._source._kuzzle_info = undefined; - rejected.push({ - document: { - _id: document._id, - body: document._source, - }, - reason: "document not found", - status: 404, - }); - } - } - /* end critical code section */ - return this._mExecute(esRequest, toImport, rejected); - } - /** - * Deletes multiple documents with one request - * - * @param {String} index - Index name - * @param {String} collection - Collection name - * @param {Array.} ids - Documents IDs - * @param {Object} options - timeout (undefined), refresh (undefined) - * - * @returns {Promise.<{ documents, errors }> - */ - async mDelete(index, collection, ids, { refresh, } = {}) { - const query = { ids: { values: [] } }; - const validIds = []; - const partialErrors = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < ids.length; i++) { - const _id = ids[i]; - if (typeof _id === "string") { - validIds.push(_id); - } - else { - partialErrors.push({ - _id, - reason: "document _id must be a string", - status: 400, - }); - } - } - /* end critical code section */ - await this.refreshCollection(index, collection); - const { items } = await this.mGet(index, collection, validIds); - let idx = 0; - 
/** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < validIds.length; i++) { - const validId = validIds[i]; - const item = items[idx]; - if (item && item._id === validId) { - query.ids.values.push(validId); - idx++; - } - else { - partialErrors.push({ - _id: validId, - reason: "document not found", - status: 404, - }); - } - } - /* end critical code section */ - // @todo duplicated query to get documents body, mGet here and search in - // deleteByQuery - const { documents } = await this.deleteByQuery(index, collection, query, { - refresh, - }); - return { documents, errors: partialErrors }; - } - /** - * Executes an ES request prepared by mcreate, mupdate, mreplace, mdelete or mwriteDocuments - * Returns a standardized ES response object, containing the list of - * successfully performed operations, and the rejected ones - * - * @param {Object} esRequest - Elasticsearch request - * @param {Object[]} documents - Document sources (format: {_id, _source}) - * @param {Object[]} partialErrors - pre-rejected documents - * @param {Object} options - limits (true) - * - * @returns {Promise.} results - */ - async _mExecute(esRequest, documents, partialErrors = [], { limits = true, source = true } = {}) { - assertWellFormedRefresh(esRequest); - if (this._hasExceededLimit(limits, documents)) { - return kerror.reject("services", "storage", "write_limit_exceeded"); - } - let body = { items: [] }; - if (documents.length > 0) { - try { - body = await this._client.bulk(esRequest); - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - const successes = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < body.items.length; i++) { - const item = body.items[i]; - const result = item[Object.keys(item)[0]]; - if (result.status >= 400) { - if (result.status === 404) { - partialErrors.push({ - document: { - _id: documents[i]._id, - body: documents[i]._source, - }, - reason: "document not found", - status: result.status, - }); - } - else { - partialErrors.push({ - document: documents[i], - reason: result.error.reason, - status: result.status, - }); - } - } - else { - successes.push({ - _id: result._id, - _source: source ? 
documents[i]._source : undefined, - _version: result._version, - created: result.result === "created", - get: result.get, - result: result.result, - status: result.status, // used by mUpdate to get the full document body - }); - } - } - /* end critical code section */ - return { - errors: partialErrors, // @todo rename items to documents - items: successes, - }; - } - /** - * Extracts, injects metadata and validates documents contained - * in a Request - * - * Used by mCreate, mUpdate, mUpsert, mReplace and mCreateOrReplace - * - * @param {Object[]} documents - Documents - * @param {Object} metadata - Kuzzle metadata - * @param {Object} options - prepareMGet (false), requireId (false) - * - * @returns {Object} { rejected, extractedDocuments, documentsToGet } - */ - _extractMDocuments(documents, metadata, { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}) { - const rejected = []; - const extractedDocuments = []; - const documentsToGet = []; - /** - * @warning Critical code section - * - * request can contain more than 10K elements - */ - for (let i = 0; i < documents.length; i++) { - const document = documents[i]; - if (!(0, safeObject_1.isPlainObject)(document.body) && !prepareMUpsert) { - rejected.push({ - document, - reason: "document body must be an object", - status: 400, - }); - } - else if (!(0, safeObject_1.isPlainObject)(document.changes) && prepareMUpsert) { - rejected.push({ - document, - reason: "document changes must be an object", - status: 400, - }); - } - else if (prepareMUpsert && - document.default && - !(0, safeObject_1.isPlainObject)(document.default)) { - rejected.push({ - document, - reason: "document default must be an object", - status: 400, - }); - } - else if (requireId && typeof document._id !== "string") { - rejected.push({ - document, - reason: "document _id must be a string", - status: 400, - }); - } - else { - this._processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet); - } - } - /* end critical code section */ - return { documentsToGet, extractedDocuments, rejected }; - } - _hasExceededLimit(limits, documents) { - return (limits && - documents.length > global.kuzzle.config.limits.documentsWriteCount); - } - _processExtract(prepareMUpsert, prepareMGet, metadata, document, extractedDocuments, documentsToGet) { - let extractedDocument; - if (prepareMUpsert) { - extractedDocument = { - _source: { - // Do not use destructuring, it's 10x slower - changes: Object.assign({}, metadata.doc, document.changes), - default: Object.assign({}, metadata.upsert, document.changes, document.default), - }, - }; - } - else { - extractedDocument = { - // Do not use destructuring, it's 10x slower - _source: Object.assign({}, metadata, document.body), - }; - } - if (document._id) { - extractedDocument._id = document._id; - } - extractedDocuments.push(extractedDocument); - if (prepareMGet && typeof document._id === "string") { - documentsToGet.push({ - _id: document._id, - _source: false, - }); - } - } - /** - * Throws an error if the provided mapping is invalid - * - * @param {Object} mapping - * @throws - */ - _checkMappings(mapping, path = [], check = true) { - const properties = Object.keys(mapping); - const mappingProperties = path.length === 0 - ? 
ROOT_MAPPING_PROPERTIES - : [...ROOT_MAPPING_PROPERTIES, ...CHILD_MAPPING_PROPERTIES]; - for (const property of properties) { - if (check && !mappingProperties.includes(property)) { - const currentPath = [...path, property].join("."); - throw kerror.get("services", "storage", "invalid_mapping", currentPath, (0, didYouMean_1.default)(property, mappingProperties)); - } - if (property === "properties") { - // type definition level, we don't check - this._checkMappings(mapping[property], [...path, "properties"], false); - } - else if (mapping[property]?.properties) { - // root properties level, check for "properties", "dynamic" and "_meta" - this._checkMappings(mapping[property], [...path, property], true); - } - } - } - /** - * Given index + collection, returns the associated alias name. - * Prefer this function to `_getIndice` and `_getAvailableIndice` whenever it is possible. - * - * @param {String} index - * @param {String} collection - * - * @returns {String} Alias name (eg: '@&nepali.liia') - */ - _getAlias(index, collection) { - return `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; - } - /** - * Given an alias name, returns the associated index name. - */ - async _checkIfAliasExists(aliasName) { - return this._client.indices.existsAlias({ - name: aliasName, - }); - } - /** - * Given index + collection, returns the associated indice name. - * Use this function if ES does not accept aliases in the request. Otherwise use `_getAlias`. - * - * @param {String} index - * @param {String} collection - * - * @returns {String} Indice name (eg: '&nepali.liia') - * @throws If there is not exactly one indice associated - */ - async _getIndice(index, collection) { - const alias = `${ALIAS_PREFIX}${this._indexPrefix}${index}${NAME_SEPARATOR}${collection}`; - const body = await this._client.cat.aliases({ - format: "json", - name: alias, - }); - if (body.length < 1) { - throw kerror.get("services", "storage", "unknown_index_collection"); - } - else if (body.length > 1) { - throw kerror.get("services", "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, '"indices"'); - } - return body[0].index; - } - /** - * Given an ES Request returns the settings of the corresponding indice. - * - * @param esRequest the ES Request with wanted settings. - * @return {Promise<*>} the settings of the indice. - * @private - */ - async _getSettings(esRequest) { - const response = await this._client.indices.getSettings(esRequest); - const index = esRequest.index; - return response[index].settings; - } - /** - * Given index + collection, returns an available indice name. - * Use this function when creating the associated indice. Otherwise use `_getAlias`. 
- * - * @param {String} index - * @param {String} collection - * - * @returns {String} Available indice name (eg: '&nepali.liia2') - */ - async _getAvailableIndice(index, collection) { - let indice = this._getAlias(index, collection).substring(INDEX_PREFIX_POSITION_IN_ALIAS); - if (!(await this._client.indices.exists({ index: indice }))) { - return indice; - } - let notAvailable; - let suffix; - do { - suffix = `.${(0, name_generator_1.randomNumber)(100000)}`; - const overflow = Buffer.from(indice + suffix).length - 255; - if (overflow > 0) { - const indiceBuffer = Buffer.from(indice); - indice = indiceBuffer - .subarray(0, indiceBuffer.length - overflow) - .toString(); - } - notAvailable = await this._client.indices.exists({ - index: indice + suffix, - }); - } while (notAvailable); - return indice + suffix; - } - /** - * Given an indice, returns the associated alias name. - * - * @param {String} indice - * - * @returns {String} Alias name (eg: '@&nepali.liia') - * @throws If there is not exactly one alias associated that is prefixed with @ - */ - async _getAliasFromIndice(indice) { - const body = await this._client.indices.getAlias({ index: indice }); - const aliases = Object.keys(body[indice].aliases).filter((alias) => alias.startsWith(ALIAS_PREFIX)); - if (aliases.length < 1) { - throw kerror.get("services", "storage", "unknown_index_collection"); - } - return aliases; - } - /** - * Check for each indice whether it has an alias or not. - * When the latter is missing, create one based on the indice name. - * - * This check avoids a breaking change for those who were using Kuzzle before - * alias attribution for each indice turned into a standard (appear in 2.14.0). - */ - async generateMissingAliases() { - try { - const body = await this._client.cat.indices({ format: "json" }); - const indices = body.map(({ index: indice }) => indice); - const aliases = await this.listAliases(); - const indicesWithoutAlias = indices.filter((indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice)); - const esRequest = { body: { actions: [] } }; - for (const indice of indicesWithoutAlias) { - esRequest.body.actions.push({ - add: { alias: `${ALIAS_PREFIX}${indice}`, index: indice }, - }); - } - if (esRequest.body.actions.length > 0) { - await this._client.indices.updateAliases(esRequest); - } - } - catch (error) { - throw this._esWrapper.formatESError(error); - } - } - /** - * Throws if index or collection includes forbidden characters - * - * @param {String} index - * @param {String} collection - */ - _assertValidIndexAndCollection(index, collection = null) { - if (!this.isIndexNameValid(index)) { - throw kerror.get("services", "storage", "invalid_index_name", index); - } - if (collection !== null && !this.isCollectionNameValid(collection)) { - throw kerror.get("services", "storage", "invalid_collection_name", collection); - } - } - /** - * Given an alias, extract the associated index. - * - * @param {String} alias - * - * @returns {String} Index name - */ - _extractIndex(alias) { - return alias.substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1); - } - /** - * Given an alias, extract the associated collection. 
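The naming helpers above all derive from the same convention: an alias is the indice name with one extra prefix character. A minimal sketch of that scheme, assuming the constant values suggested by the examples in the comments (ALIAS_PREFIX "@", NAME_SEPARATOR ".", "&" as the public index prefix; these values are assumptions, only the '@&nepali.liia' and '&nepali.liia' examples come from the code):

const ALIAS_PREFIX = "@";   // assumption, inferred from the '@&nepali.liia' example
const NAME_SEPARATOR = "."; // assumption
const indexPrefix = "&";    // assumption: prefix used for the public scope

function aliasFor(index, collection) {
  return `${ALIAS_PREFIX}${indexPrefix}${index}${NAME_SEPARATOR}${collection}`;
}

aliasFor("nepali", "liia");              // "@&nepali.liia" -> what _getAlias returns
aliasFor("nepali", "liia").substring(1); // "&nepali.liia"  -> the matching indice name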
- * - * @param {String} alias - * - * @returns {String} Collection name - */ - _extractCollection(alias) { - const separatorPos = alias.indexOf(NAME_SEPARATOR); - return alias.substr(separatorPos + 1, alias.length); - } - /** - * Given aliases, extract indexes and collections. - * - * @param {Array.} aliases - * @param {Object.Boolean} includeHidden Only refers to `HIDDEN_COLLECTION` occurences. An empty index will still be listed. Default to `false`. - * - * @returns {Object.} Indexes as key and an array of their collections as value - */ - _extractSchema(aliases, { includeHidden = false } = {}) { - const schema = {}; - for (const alias of aliases) { - const [indexName, collectionName] = alias - .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) - .split(NAME_SEPARATOR); - if (alias[INDEX_PREFIX_POSITION_IN_ALIAS] === this._indexPrefix && - (collectionName !== HIDDEN_COLLECTION || includeHidden)) { - if (!schema[indexName]) { - schema[indexName] = []; - } - if (!schema[indexName].includes(collectionName)) { - schema[indexName].push(collectionName); - } - } - } - return schema; - } - /** - * Creates the hidden collection on the provided index if it does not already - * exists - * - * @param {String} index Index name - */ - async _createHiddenCollection(index) { - const mutex = new mutex_1.Mutex(`hiddenCollection/${index}`); - try { - await mutex.lock(); - if (await this._hasHiddenCollection(index)) { - return; - } - const esRequest = { - aliases: { - [this._getAlias(index, HIDDEN_COLLECTION)]: {}, - }, - index: await this._getAvailableIndice(index, HIDDEN_COLLECTION), - settings: { - number_of_replicas: this._config.defaultSettings.number_of_replicas, - number_of_shards: this._config.defaultSettings.number_of_shards, - }, - wait_for_active_shards: await this._getWaitForActiveShards(), - }; - await this._client.indices.create(esRequest); - } - catch (e) { - throw this._esWrapper.formatESError(e); - } - finally { - await mutex.unlock(); - } - } - /** - * We need to always wait for a minimal number of shards to be available - * before answering to the client. This is to avoid Elasticsearch node - * to return a 404 Not Found error when the client tries to index a - * document in the index. - * To find the best value for this setting, we need to take into account - * the number of nodes in the cluster and the number of shards per index. 
- */ - async _getWaitForActiveShards() { - const body = await this._client.cat.nodes({ format: "json" }); - const numberOfNodes = body.length; - if (numberOfNodes > 1) { - return "all"; - } - return 1; - } - /** - * Scroll indice in elasticsearch and return all document that match the filter - * /!\ throws a write_limit_exceed error: this method is intended to be used - * by deleteByQuery and updateByQuery - * - * @param {Object} esRequest - Search request body - * - * @returns {Promise.} resolve to an array of documents - */ - async _getAllDocumentsFromQuery(esRequest) { - let { hits, _scroll_id } = await this._client.search(esRequest); - const totalHitsValue = this._getHitsTotalValue(hits); - if (totalHitsValue > global.kuzzle.config.limits.documentsWriteCount) { - throw kerror.get("services", "storage", "write_limit_exceeded"); - } - let documents = hits.hits.map((h) => ({ - _id: h._id, - _source: h._source, - body: {}, - })); - while (totalHitsValue !== documents.length) { - ({ hits, _scroll_id } = await this._client.scroll({ - scroll: esRequest.scroll, - scroll_id: _scroll_id, - })); - documents = documents.concat(hits.hits.map((h) => ({ - _id: h._id, - _source: h._source, - body: {}, - }))); - } - await this.clearScroll(_scroll_id); - return documents; - } - /** - * Clean and normalize the searchBody - * Ensure only allowed parameters are passed to ES - * - * @param {Object} searchBody - ES search body (with query, aggregations, sort, etc) - */ - _sanitizeSearchBody(searchBody) { - // Only allow a whitelist of top level properties - for (const key of Object.keys(searchBody)) { - if (searchBody[key] !== undefined && !this.searchBodyKeys.includes(key)) { - throw kerror.get("services", "storage", "invalid_search_query", key); - } - } - // Ensure that the body does not include a script - this._scriptCheck(searchBody); - // Avoid empty queries that causes ES to respond with an error. - // Empty queries are turned into match_all queries - if (lodash_1.default.isEmpty(searchBody.query)) { - searchBody.query = { match_all: {} }; - } - return searchBody; - } - /** - * Throw if a script is used in the query. - * - * Only Stored Scripts are accepted - * - * @param {Object} object - */ - _scriptCheck(object) { - for (const [key, value] of Object.entries(object)) { - if (this.scriptKeys.includes(key)) { - for (const scriptArg of Object.keys(value)) { - if (!this.scriptAllowedArgs.includes(scriptArg)) { - throw kerror.get("services", "storage", "invalid_query_keyword", `${key}.${scriptArg}`); - } - } - } - // Every object must be checked here, even the ones nested into an array - else if (typeof value === "object" && value !== null) { - this._scriptCheck(value); - } - } - } - /** - * Checks if a collection name is valid - * @param {string} name - * @returns {Boolean} - */ - isCollectionNameValid(name) { - return _isObjectNameValid(name); - } - /** - * Checks if a collection name is valid - * @param {string} name - * @returns {Boolean} - */ - isIndexNameValid(name) { - return _isObjectNameValid(name); - } - /** - * Clears an allocated scroll - * @param {[type]} id [description] - * @returns {[type]} [description] - */ - async clearScroll(id) { - if (id) { - (0, debug_1.default)("clearing scroll: %s", id); - await this._client.clearScroll({ scroll_id: id }); - } - } - /** - * Loads a configuration value from services.storageEngine and assert a valid - * ms format. 
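The `_sanitizeSearchBody` helper above never forwards an empty query to Elasticsearch, since ES answers empty queries with an error; it substitutes a `match_all` query instead. A minimal sketch of that normalisation, with a hypothetical input object:

const _ = require("lodash");

const searchBody = { query: {} };

if (_.isEmpty(searchBody.query)) {
  // An empty query would make Elasticsearch respond with an error,
  // so it is turned into a match_all query.
  searchBody.query = { match_all: {} };
}
// searchBody -> { query: { match_all: {} } }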
- * - * @param {String} key - relative path to the key in configuration - * - * @returns {Number} milliseconds - */ - _loadMsConfig(key) { - const configValue = lodash_1.default.get(this._config, key); - (0, assert_1.default)(typeof configValue === "string", `services.storageEngine.${key} must be a string.`); - const parsedValue = (0, ms_1.default)(configValue); - (0, assert_1.default)(typeof parsedValue === "number", `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`); - return parsedValue; - } - /** - * Returns true if one of the mappings dynamic property changes value from - * false to true - */ - _dynamicChanges(previousMappings, newMappings) { - const previousValues = findDynamic(previousMappings); - for (const [path, previousValue] of Object.entries(previousValues)) { - if (previousValue.toString() !== "false") { - continue; - } - const newValue = lodash_1.default.get(newMappings, path); - if (newValue && newValue.toString() !== "false") { - return true; - } - } - return false; - } - async waitForElasticsearch() { - if (esState !== esStateEnum.NONE) { - while (esState !== esStateEnum.OK) { - await bluebird_1.default.delay(1000); - } - return; - } - esState = esStateEnum.AWAITING; - global.kuzzle.log.info("[ℹ] Trying to connect to Elasticsearch..."); - while (esState !== esStateEnum.OK) { - try { - // Wait for at least 1 shard to be initialized - const health = await this._client.cluster.health({ - wait_for_no_initializing_shards: true, - }); - if (health.number_of_pending_tasks === 0) { - global.kuzzle.log.info("[✔] Elasticsearch is ready"); - esState = esStateEnum.OK; - } - else { - global.kuzzle.log.info(`[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining`); - await bluebird_1.default.delay(1000); - } - } - catch (e) { - await bluebird_1.default.delay(1000); - } - } - } - /** - * Checks if the dynamic properties are correct - */ - _checkDynamicProperty(mappings) { - const dynamicProperties = findDynamic(mappings); - for (const [path, value] of Object.entries(dynamicProperties)) { - // Prevent common mistake - if (typeof value === "boolean") { - lodash_1.default.set(mappings, path, value.toString()); - } - else if (typeof value !== "string") { - throw kerror.get("services", "storage", "invalid_mapping", path, "Dynamic property value should be a string."); - } - if (!DYNAMIC_PROPERTY_VALUES.includes(value.toString())) { - throw kerror.get("services", "storage", "invalid_mapping", path, `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join('", "')}"`); - } - } - } - _setLastActionToKuzzleMeta(esRequest, alias, kuzzleMeta) { - /** - * @warning Critical code section - * - * bulk body can contain more than 10K elements - */ - let lastAction = ""; - const actionNames = ["index", "create", "update", "delete"]; - for (let i = 0; i < esRequest.operations.length; i++) { - const item = esRequest.operations[i]; - const action = Object.keys(item)[0]; - if (actionNames.indexOf(action) !== -1) { - lastAction = action; - item[action]._index = alias; - if (item[action]?._type) { - item[action]._type = undefined; - } - } - else if (lastAction === "index" || lastAction === "create") { - item._kuzzle_info = kuzzleMeta.created; - } - else if (lastAction === "update") { - this._setLastActionToKuzzleMetaUpdate(item, kuzzleMeta); - } - } - /* end critical code section */ - } - _setLastActionToKuzzleMetaUpdate(item, kuzzleMeta) { - for (const prop of ["doc", "upsert"]) { - if ((0, safeObject_1.isPlainObject)(item[prop])) { - item[prop]._kuzzle_info = kuzzleMeta.updated; - } - } - } - _getHitsTotalValue(hits) { - if (typeof hits.total === "number") { - return hits.total; - } - return hits.total.value; - } -} -exports.ES8 = ES8; -/** - * Finds paths and values of mappings dynamic properties - * - * @example - * - * findDynamic(mappings); - * { - * "properties.metadata.dynamic": "true", - * "properties.user.properties.address.dynamic": "strict" - * } - */ -function findDynamic(mappings, path = [], results = {}) { - if (mappings.dynamic !== undefined) { - results[path.concat("dynamic").join(".")] = mappings.dynamic; - } - for (const [key, value] of Object.entries(mappings)) { - if ((0, safeObject_1.isPlainObject)(value)) { - findDynamic(value, path.concat(key), results); - } - } - return results; -} -/** - * Forbids the use of the _routing ES option - * - * @param {Object} esRequest - * @throws - */ -function assertNoRouting(esRequest) { - if (esRequest._routing) { - throw kerror.get("services", "storage", "no_routing"); - } -} -/** - * Checks if the optional "refresh" argument is well-formed - * - * @param {Object} esRequest - * @throws - */ -function assertWellFormedRefresh(esRequest) { - if (!["wait_for", "false", false, undefined].includes(esRequest.refresh)) { - throw kerror.get("services", "storage", "invalid_argument", "refresh", '"wait_for", false'); - } -} -function getKuid(userId) { - if (!userId) { - return null; - } - return String(userId); -} -/** - * Checks if an index or collection name is valid - * - * @see https://www.elastic.co/guide/en/elasticsearch/reference/7.4/indices-create-index.html - * - * Beware of the length check: ES allows indice names up to 255 bytes, but since - * in Kuzzle we emulate collections as indices, we have to make sure - * that the privacy prefix, the index name, the separator and the collection - * name ALL fit within the 255-bytes limit of Elasticsearch. 
The simplest way - * is to limit index and collection names to 126 bytes and document that - * limitation (prefix(1) + index(1..126) + sep(1) + collection(1..126) = 4..254) - * - * @param {string} name - * @returns {Boolean} - */ -function _isObjectNameValid(name) { - if (typeof name !== "string" || name.length === 0) { - return false; - } - if (name.toLowerCase() !== name) { - return false; - } - if (Buffer.from(name).length > 126) { - return false; - } - if (name === "_all") { - return false; - } - let valid = true; - for (let i = 0; valid && i < FORBIDDEN_CHARS.length; i++) { - valid = !name.includes(FORBIDDEN_CHARS[i]); - } - return valid; -} -//# sourceMappingURL=elasticsearch.js.map \ No newline at end of file diff --git a/lib/types/storage/7/Elasticsearch.js b/lib/types/storage/7/Elasticsearch.js deleted file mode 100644 index 12de03811f..0000000000 --- a/lib/types/storage/7/Elasticsearch.js +++ /dev/null @@ -1,3 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=Elasticsearch.js.map \ No newline at end of file diff --git a/lib/types/storage/8/Elasticsearch.js b/lib/types/storage/8/Elasticsearch.js deleted file mode 100644 index 12de03811f..0000000000 --- a/lib/types/storage/8/Elasticsearch.js +++ /dev/null @@ -1,3 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=Elasticsearch.js.map \ No newline at end of file From bc95c25bbbd1152e4fecb4e7e79d756a9f6f6d60 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 14 Feb 2024 15:02:01 +0100 Subject: [PATCH 15/59] chore(pipeline): remove the matrix for the lint, it is unnecessary --- .github/workflows/workflow.yaml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index e2964ef829..b388b933ce 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -48,12 +48,9 @@ jobs: run: ./.ci/scripts/check-error-codes-documentation.sh lint: - name: Lint - Node.js LTS ${{ matrix.node-version }} + name: Lint - Node.js runs-on: ubuntu-22.04 needs: [prepare-matrix] - strategy: - matrix: - node-version: ${{ fromJson(needs.prepare-matrix.outputs.matrix).node-version }} steps: - name: Checkout project uses: actions/checkout@v3 @@ -61,10 +58,10 @@ jobs: - name: Install additional libraries uses: ./.github/actions/install-packages - - name: Node version ${{ matrix.node-version }} + - name: Node version ${{ env.NODE_LTS_CURRENT_VERSION }} uses: actions/setup-node@v4 with: - node-version: ${{ matrix.node-version }} + node-version: ${{ env.NODE_LTS_CURRENT_VERSION }} - name: Install depedencies run: npm ci From b426a0193a7953ce58a65e1d9368c98baa53df79 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 14 Feb 2024 15:26:50 +0100 Subject: [PATCH 16/59] style(linting): lint the application --- lib/core/backend/backendStorage.ts | 2 +- lib/core/plugin/pluginContext.ts | 20 ++-- lib/service/storage/7/elasticsearch.ts | 148 ++++++++++++------------- lib/service/storage/8/elasticsearch.ts | 143 ++++++++++++------------ lib/service/storage/elasticsearch.ts | 4 +- lib/types/storage/8/Elasticsearch.ts | 2 +- 6 files changed, 160 insertions(+), 159 deletions(-) diff --git a/lib/core/backend/backendStorage.ts b/lib/core/backend/backendStorage.ts index 5036dcba70..949e3fa119 100644 --- a/lib/core/backend/backendStorage.ts +++ b/lib/core/backend/backendStorage.ts @@ -58,7 +58,7 @@ export class BackendStorage extends ApplicationManager { 
getElasticsearchClient(clientConfig?: JSONObject): any { return Elasticsearch.buildClient( { ...this._kuzzle.config.services.storageEngine.client, ...clientConfig }, - this._kuzzle.config.services.storageEngine.majorVersion + this._kuzzle.config.services.storageEngine.majorVersion, ); } } diff --git a/lib/core/plugin/pluginContext.ts b/lib/core/plugin/pluginContext.ts index ad0a883e37..18bbde3eee 100644 --- a/lib/core/plugin/pluginContext.ts +++ b/lib/core/plugin/pluginContext.ts @@ -129,7 +129,7 @@ export class PluginContext { connectionId: string, index: string, collection: string, - filters: JSONObject + filters: JSONObject, ) => Promise<{ roomId: string }>; /** @@ -138,7 +138,7 @@ export class PluginContext { unregister: ( connectionId: string, roomId: string, - notify: boolean + notify: boolean, ) => Promise; }; @@ -282,7 +282,7 @@ export class PluginContext { // eslint-disable-next-line no-inner-declarations function PluginContextRepository( collection: string, - ObjectConstructor: any = null + ObjectConstructor: any = null, ) { if (!collection) { throw contextError.get("missing_collection"); @@ -306,7 +306,7 @@ export class PluginContext { function PluginContextESClient(): any { return Elasticsearch.buildClient( - global.kuzzle.config.services.storageEngine.client + global.kuzzle.config.services.storageEngine.client, ); } @@ -317,7 +317,7 @@ export class PluginContext { Mutex: Mutex, Repository: PluginContextRepository as unknown as new ( collection: string, - objectConstructor: any + objectConstructor: any, ) => Repository, Request: instantiateRequest as any, RequestContext: RequestContext as any, @@ -367,7 +367,7 @@ export class PluginContext { }, { connectionId: connectionId, - } + }, ); return global.kuzzle.ask("core:realtime:subscribe", request); }, @@ -376,17 +376,17 @@ export class PluginContext { "core:realtime:unsubscribe", connectionId, roomId, - notify + notify, ), }, trigger: (eventName, payload) => global.kuzzle.pipe(`plugin-${pluginName}:${eventName}`, payload), validation: { addType: global.kuzzle.validation.addType.bind( - global.kuzzle.validation + global.kuzzle.validation, ), validate: global.kuzzle.validation.validate.bind( - global.kuzzle.validation + global.kuzzle.validation, ), }, }; @@ -421,7 +421,7 @@ function execute(request, callback) { ["subscribe", "unsubscribe"].includes(request.input.action) ) { return promback.reject( - contextError.get("unavailable_realtime", request.input.action) + contextError.get("unavailable_realtime", request.input.action), ); } diff --git a/lib/service/storage/7/elasticsearch.ts b/lib/service/storage/7/elasticsearch.ts index 5354a6a1cb..809aa1185c 100644 --- a/lib/service/storage/7/elasticsearch.ts +++ b/lib/service/storage/7/elasticsearch.ts @@ -169,7 +169,7 @@ export class ES7 { "Your dynamic mapping policy is set to 'true' for new fields.", "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n") + ].join("\n"), ); } @@ -191,7 +191,7 @@ export class ES7 { "services", "storage", "version_mismatch", - version.number + version.number, ); } @@ -330,14 +330,14 @@ export class ES7 { "services", "storage", "scroll_duration_too_great", - _scrollTTL + _scrollTTL, ); } } const stringifiedScrollInfo = await global.kuzzle.ask( "core:cache:internal:get", - cacheKey + cacheKey, ); if (!stringifiedScrollInfo) { @@ -362,7 +362,7 @@ export class ES7 { 
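The `BackendStorage` and `PluginContext` changes above route raw client creation through `Elasticsearch.buildClient(config, majorVersion)`. A hedged usage sketch of that helper; only the call signature comes from the diff, while the import path and node URL below are placeholders:

import { Elasticsearch } from "./lib/service/storage/elasticsearch";

// Builds a raw Elasticsearch client (ES 7 or ES 8, depending on majorVersion).
const client = Elasticsearch.buildClient(
  { node: "http://localhost:9200" }, // placeholder connection settings
  8, // services.storageEngine.majorVersion
);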
JSON.stringify(scrollInfo), { ttl: ms(_scrollTTL) || this.scrollTTL, - } + }, ); } @@ -404,7 +404,7 @@ export class ES7 { from?: number; size?: number; scroll?: string; - } = {} + } = {}, ) { let esIndexes: any; @@ -440,7 +440,7 @@ export class ES7 { "services", "storage", "scroll_duration_too_great", - scroll + scroll, ); } } @@ -464,7 +464,7 @@ export class ES7 { index, targets, }), - { ttl } + { ttl }, ); body.remaining = body.hits.total.value - body.hits.hits.length; @@ -564,7 +564,7 @@ export class ES7 { for (const [name, innerHit] of Object.entries(innerHits)) { formattedInnerHits[name] = await Bluebird.map( (innerHit as any).hits.hits, - formatHit + formatHit, ); } return formattedInnerHits; @@ -725,7 +725,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { assertIsObject(content); @@ -789,7 +789,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest = { body: content, @@ -853,7 +853,7 @@ export class ES7 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest: RequestParams.Update> = { _source: "true", @@ -919,7 +919,7 @@ export class ES7 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest: RequestParams.Update> = { _source: "true", @@ -992,7 +992,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -1025,7 +1025,7 @@ export class ES7 { "not_found", id, index, - collection + collection, ); } @@ -1061,7 +1061,7 @@ export class ES7 { refresh, }: { refresh?: boolean | "wait_for"; - } = {} + } = {}, ) { const esRequest = { id, @@ -1109,7 +1109,7 @@ export class ES7 { refresh?: boolean | "wait_for"; size?: number; fetch?: boolean; - } = {} + } = {}, ) { const esRequest: RequestParams.DeleteByQuery> = { body: this._sanitizeSearchBody({ query }), @@ -1171,7 +1171,7 @@ export class ES7 { }: { refresh?: boolean | "wait_for"; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -1241,7 +1241,7 @@ export class ES7 { refresh?: boolean | "wait_for"; size?: number; userId?: string; - } = {} + } = {}, ) { try { const esRequest = { @@ -1264,7 +1264,7 @@ export class ES7 { index, collection, documents, - { refresh, userId } + { refresh, userId }, ); return { @@ -1296,7 +1296,7 @@ export class ES7 { refresh = false, }: { refresh?: boolean; - } = {} + } = {}, ) { const script = { params: {}, @@ -1340,7 +1340,7 @@ export class ES7 { "storage", "incomplete_update", response.body.updated, - errors + errors, ); } @@ -1372,7 +1372,7 @@ export class ES7 { }: { size?: number; scrollTTl?: string; - } = {} + } = {}, ): Promise { const esRequest: RequestParams.Search = { body: this._sanitizeSearchBody({ query }), @@ -1398,7 +1398,7 @@ export class ES7 { esRequest, async function getMoreUntilDone( error, - { body: { hits, _scroll_id } } + { body: { hits, _scroll_id } }, ) { if (error) { reject(error); @@ -1418,12 +1418,12 @@ export class ES7 { scroll: esRequest.scroll, scroll_id: _scroll_id, }, - getMoreUntilDone + getMoreUntilDone, ); } else { resolve(results); } - } + }, ); }); } finally { @@ -1471,7 +1471,7 @@ export class ES7 { "storage", "index_already_exists", indexType, - index + index, ); } } @@ -1497,7 +1497,7 @@ export class ES7 { { 
mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {} + }: { mappings?: TypeMapping; settings?: Record } = {}, ) { this._assertValidIndexAndCollection(index, collection); @@ -1506,7 +1506,7 @@ export class ES7 { "services", "storage", "collection_reserved", - HIDDEN_COLLECTION + HIDDEN_COLLECTION, ); } @@ -1549,7 +1549,7 @@ export class ES7 { dynamic: mappings.dynamic || this._config.commonMapping.dynamic, properties: _.merge( mappings.properties, - this._config.commonMapping.properties + this._config.commonMapping.properties, ), }; @@ -1620,7 +1620,7 @@ export class ES7 { includeKuzzleMeta = false, }: { includeKuzzleMeta?: boolean; - } = {} + } = {}, ) { const indice = await this._getIndice(index, collection); const esRequest = { @@ -1661,7 +1661,7 @@ export class ES7 { { mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {} + }: { mappings?: TypeMapping; settings?: Record } = {}, ) { const esRequest = { index: await this._getIndice(index, collection), @@ -1767,7 +1767,7 @@ export class ES7 { async updateMapping( index: string, collection: string, - mappings: TypeMapping = {} + mappings: TypeMapping = {}, ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { const esRequest: RequestParams.IndicesPutMapping> = { body: {}, @@ -1798,7 +1798,7 @@ export class ES7 { const fullProperties = _.merge( collectionMappings.properties, - mappings.properties + mappings.properties, ); return { @@ -1902,7 +1902,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const dateNow = Date.now(); @@ -2064,7 +2064,7 @@ export class ES7 { for (const [index, collections] of Object.entries(schema)) { schema[index] = (collections as string[]).filter( - (c) => c !== HIDDEN_COLLECTION + (c) => c !== HIDDEN_COLLECTION, ); } @@ -2165,7 +2165,7 @@ export class ES7 { return request; }, - { index: [] } + { index: [] }, ); if (esRequest.index.length === 0) { @@ -2236,7 +2236,7 @@ export class ES7 { async exists( index: string, collection: string, - id: string + id: string, ): Promise { const esRequest: RequestParams.Exists = { id, @@ -2368,7 +2368,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2466,7 +2466,7 @@ export class ES7 { injectKuzzleMeta = true, limits = true, source = true, - }: KRequestParams = {} + }: KRequestParams = {}, ) { let kuzzleMeta = {}; @@ -2490,7 +2490,7 @@ export class ES7 { }; const { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta + kuzzleMeta, ); esRequest.body = []; @@ -2538,7 +2538,7 @@ export class ES7 { retryOnConflict = 0, timeout = undefined, userId = null, - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), toImport = [], @@ -2556,7 +2556,7 @@ export class ES7 { }, { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta + kuzzleMeta, ); /** @@ -2639,7 +2639,7 @@ export class ES7 { retryOnConflict?: number; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -2671,7 +2671,7 @@ export class ES7 { { prepareMUpsert: true, requireId: true, - } + }, ); /** @@ -2693,7 +2693,7 @@ export class ES7 { { doc: extractedDocuments[i]._source.changes, upsert: extractedDocuments[i]._source.default, - } + }, ); // _source: 
true // Makes ES return the updated document source in the response. @@ -2704,7 +2704,7 @@ export class ES7 { const response = await this._mExecute( esRequest, extractedDocuments, - rejected + rejected, ); // with _source: true, ES returns the updated document in @@ -2746,7 +2746,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2836,7 +2836,7 @@ export class ES7 { }: { refresh?: boolean | "wait_for"; timeout?: number; - } = {} + } = {}, ) { const query = { ids: { values: [] } }; const validIds = []; @@ -2914,7 +2914,7 @@ export class ES7 { esRequest: RequestParams.Bulk, documents: JSONObject[], partialErrors: JSONObject[] = [], - { limits = true, source = true } = {} + { limits = true, source = true } = {}, ) { assertWellFormedRefresh(esRequest); @@ -2996,7 +2996,7 @@ export class ES7 { _extractMDocuments( documents: JSONObject[], metadata: JSONObject, - { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} + { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}, ) { const rejected = []; const extractedDocuments = []; @@ -3045,7 +3045,7 @@ export class ES7 { metadata, document, extractedDocuments, - documentsToGet + documentsToGet, ); } } @@ -3067,7 +3067,7 @@ export class ES7 { metadata: JSONObject, document: JSONObject, extractedDocuments: JSONObject[], - documentsToGet: JSONObject[] + documentsToGet: JSONObject[], ) { let extractedDocument; @@ -3080,7 +3080,7 @@ export class ES7 { {}, metadata.upsert, document.changes, - document.default + document.default, ), }, }; @@ -3127,7 +3127,7 @@ export class ES7 { "storage", "invalid_mapping", currentPath, - didYouMean(property, mappingProperties) + didYouMean(property, mappingProperties), ); } @@ -3189,7 +3189,7 @@ export class ES7 { "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, - '"indices"' + '"indices"', ); } @@ -3204,7 +3204,7 @@ export class ES7 { * @private */ async _getSettings( - esRequest: RequestParams.IndicesGetSettings + esRequest: RequestParams.IndicesGetSettings, ): Promise { const response = await this._client.indices.getSettings(esRequest); const index = esRequest.index as string; @@ -3223,10 +3223,10 @@ export class ES7 { */ async _getAvailableIndice( index: string, - collection: string + collection: string, ): Promise { let indice = this._getAlias(index, collection).substring( - INDEX_PREFIX_POSITION_IN_ALIAS + INDEX_PREFIX_POSITION_IN_ALIAS, ); if (!(await this._client.indices.exists({ index: indice })).body) { @@ -3267,7 +3267,7 @@ export class ES7 { async _getAliasFromIndice(indice) { const { body } = await this._client.indices.getAlias({ index: indice }); const aliases = Object.keys(body[indice].aliases).filter((alias) => - alias.startsWith(ALIAS_PREFIX) + alias.startsWith(ALIAS_PREFIX), ); if (aliases.length < 1) { @@ -3293,7 +3293,7 @@ export class ES7 { const indicesWithoutAlias = indices.filter( (indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice) + !aliases.some((alias) => alias.indice === indice), ); const esRequest = { body: { actions: [] } }; @@ -3327,7 +3327,7 @@ export class ES7 { "services", "storage", "invalid_collection_name", - collection + collection, ); } } @@ -3342,7 +3342,7 @@ export class ES7 { _extractIndex(alias) { return alias.substr( INDEX_PREFIX_POSITION_IN_ALIAS + 1, - alias.indexOf(NAME_SEPARATOR) - 
INDEX_PREFIX_POSITION_IN_ALIAS - 1 + alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1, ); } @@ -3460,7 +3460,7 @@ export class ES7 { * @returns {Promise.} resolve to an array of documents */ async _getAllDocumentsFromQuery( - esRequest: RequestParams.Search> + esRequest: RequestParams.Search>, ) { let { body: { hits, _scroll_id }, @@ -3487,7 +3487,7 @@ export class ES7 { hits.hits.map((h: JSONObject) => ({ _id: h._id, _source: h._source, - })) + })), ); } @@ -3538,7 +3538,7 @@ export class ES7 { "services", "storage", "invalid_query_keyword", - `${key}.${scriptArg}` + `${key}.${scriptArg}`, ); } } @@ -3593,14 +3593,14 @@ export class ES7 { assert( typeof configValue === "string", - `services.storageEngine.${key} must be a string.` + `services.storageEngine.${key} must be a string.`, ); const parsedValue = ms(configValue); assert( typeof parsedValue === "number", - `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` + `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`, ); return parsedValue; @@ -3653,7 +3653,7 @@ export class ES7 { esState = esStateEnum.OK; } else { global.kuzzle.log.info( - `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining` + `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining`, ); await Bluebird.delay(1000); } @@ -3678,7 +3678,7 @@ export class ES7 { "storage", "invalid_mapping", path, - "Dynamic property value should be a string." + "Dynamic property value should be a string.", ); } @@ -3689,8 +3689,8 @@ export class ES7 { "invalid_mapping", path, `Incorrect dynamic property value (${value}). Should be one of "${DYNAMIC_PROPERTY_VALUES.join( - '", "' - )}"` + '", "', + )}"`, ); } } @@ -3699,7 +3699,7 @@ export class ES7 { _setLastActionToKuzzleMeta( esRequest: JSONObject, alias: string, - kuzzleMeta: JSONObject + kuzzleMeta: JSONObject, ) { /** * @warning Critical code section @@ -3789,7 +3789,7 @@ function assertWellFormedRefresh(esRequest) { "storage", "invalid_argument", "refresh", - '"wait_for", false' + '"wait_for", false', ); } } diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts index a91152cc4e..a16647302f 100644 --- a/lib/service/storage/8/elasticsearch.ts +++ b/lib/service/storage/8/elasticsearch.ts @@ -171,7 +171,7 @@ export class ES8 { "Your dynamic mapping policy is set to 'true' for new fields.", "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n") + ].join("\n"), ); } @@ -188,7 +188,7 @@ export class ES8 { "services", "storage", "version_mismatch", - version.number + version.number, ); } @@ -324,14 +324,14 @@ export class ES8 { "services", "storage", "scroll_duration_too_great", - _scrollTTL + _scrollTTL, ); } } const stringifiedScrollInfo = await global.kuzzle.ask( "core:cache:internal:get", - cacheKey + cacheKey, ); if (!stringifiedScrollInfo) { @@ -357,7 +357,7 @@ export class ES8 { JSON.stringify(scrollInfo), { ttl: ms(_scrollTTL) || this.scrollTTL, - } + }, ); } @@ -399,7 +399,7 @@ export class ES8 { from?: number; size?: number; scroll?: string; - } = {} + } = {}, ) { let esIndexes: any; @@ -435,7 +435,7 @@ export class ES8 { "services", "storage", "scroll_duration_too_great", - scroll + scroll, ); } } @@ -461,7 
+461,7 @@ export class ES8 { index, targets, }), - { ttl } + { ttl }, ); remaining = totalHitsValue - body.hits.hits.length; @@ -473,7 +473,6 @@ export class ES8 { targets, }); } catch (error) { - console.error(error); throw this._esWrapper.formatESError(error); } } @@ -505,7 +504,7 @@ export class ES8 { async _formatSearchResult( body: any, remaining?: number, - searchInfo: any = {} + searchInfo: any = {}, ) { let aliasToTargets = {}; const aliasCache = new Map(); @@ -566,7 +565,7 @@ export class ES8 { for (const [name, innerHit] of Object.entries(innerHits)) { formattedInnerHits[name] = await Bluebird.map( (innerHit as any).hits.hits, - formatHit + formatHit, ); } return formattedInnerHits; @@ -725,7 +724,7 @@ export class ES8 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { assertIsObject(content); @@ -789,7 +788,7 @@ export class ES8 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest: estypes.IndexRequest> = { document: content, @@ -853,7 +852,7 @@ export class ES8 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {} + } = {}, ): Promise { const esRequest: estypes.UpdateRequest< KRequestBody, @@ -924,7 +923,7 @@ export class ES8 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest: estypes.UpdateRequest< KRequestBody, @@ -1000,7 +999,7 @@ export class ES8 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest: estypes.IndexRequest> = { @@ -1033,7 +1032,7 @@ export class ES8 { "not_found", id, index, - collection + collection, ); } @@ -1069,7 +1068,7 @@ export class ES8 { refresh, }: { refresh?: boolean | "wait_for"; - } = {} + } = {}, ) { const esRequest: estypes.DeleteRequest = { id, @@ -1117,7 +1116,7 @@ export class ES8 { refresh?: boolean | "wait_for"; size?: number; fetch?: boolean; - } = {} + } = {}, ) { const esRequest = { ...this._sanitizeSearchBody({ query }), @@ -1190,7 +1189,7 @@ export class ES8 { }: { refresh?: boolean | "wait_for"; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest: estypes.GetRequest = { @@ -1268,7 +1267,7 @@ export class ES8 { refresh?: boolean | "wait_for"; size?: number; userId?: string; - } = {} + } = {}, ) { try { const esRequest: estypes.SearchRequest = { @@ -1291,7 +1290,7 @@ export class ES8 { index, collection, documents, - { refresh, userId } + { refresh, userId }, ); return { @@ -1323,7 +1322,7 @@ export class ES8 { refresh = false, }: { refresh?: boolean; - } = {} + } = {}, ) { const script = { params: {}, @@ -1365,7 +1364,7 @@ export class ES8 { "storage", "incomplete_update", response.updated, - errors + errors, ); } @@ -1397,7 +1396,7 @@ export class ES8 { }: { size?: number; scrollTTl?: string; - } = {} + } = {}, ): Promise { const esRequest: estypes.SearchRequest = { ...this._sanitizeSearchBody({ query }), @@ -1474,7 +1473,7 @@ export class ES8 { "storage", "index_already_exists", indexType, - index + index, ); } } @@ -1503,7 +1502,7 @@ export class ES8 { }: { mappings?: estypes.MappingTypeMapping; settings?: Record; - } = {} + } = {}, ) { this._assertValidIndexAndCollection(index, collection); @@ -1512,7 +1511,7 @@ export class ES8 { "services", "storage", "collection_reserved", - HIDDEN_COLLECTION + HIDDEN_COLLECTION, ); } @@ -1553,7 +1552,7 @@ export class ES8 { 
dynamic: mappings.dynamic || this._config.commonMapping.dynamic, properties: _.merge( mappings.properties, - this._config.commonMapping.properties + this._config.commonMapping.properties, ), }; @@ -1624,7 +1623,7 @@ export class ES8 { includeKuzzleMeta = false, }: { includeKuzzleMeta?: boolean; - } = {} + } = {}, ) { const indice = await this._getIndice(index, collection); const esRequest: estypes.IndicesGetMappingRequest = { @@ -1668,7 +1667,7 @@ export class ES8 { }: { mappings?: estypes.MappingTypeMapping; settings?: Record; - } = {} + } = {}, ) { const esRequest: estypes.IndicesGetSettingsRequest = { index: await this._getIndice(index, collection), @@ -1773,7 +1772,7 @@ export class ES8 { async updateMapping( index: string, collection: string, - mappings: estypes.MappingTypeMapping = {} + mappings: estypes.MappingTypeMapping = {}, ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { let esRequest: estypes.IndicesPutMappingRequest = { index: this._getAlias(index, collection), @@ -1804,7 +1803,7 @@ export class ES8 { const fullProperties = _.merge( collectionMappings.properties, - mappings.properties + mappings.properties, ); return { @@ -1906,7 +1905,7 @@ export class ES8 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const dateNow = Date.now(); @@ -2067,7 +2066,7 @@ export class ES8 { for (const [index, collections] of Object.entries(schema)) { schema[index] = (collections as string[]).filter( - (c) => c !== HIDDEN_COLLECTION + (c) => c !== HIDDEN_COLLECTION, ); } @@ -2168,7 +2167,7 @@ export class ES8 { return request; }, - { index: [] } + { index: [] }, ); if (esRequest.index.length === 0) { @@ -2237,7 +2236,7 @@ export class ES8 { async exists( index: string, collection: string, - id: string + id: string, ): Promise { const esRequest: estypes.ExistsRequest = { id, @@ -2365,7 +2364,7 @@ export class ES8 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2465,7 +2464,7 @@ export class ES8 { injectKuzzleMeta = true, limits = true, source = true, - }: KRequestParams = {} + }: KRequestParams = {}, ) { let kuzzleMeta = {}; @@ -2489,7 +2488,7 @@ export class ES8 { }; const { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta + kuzzleMeta, ); esRequest.operations = []; @@ -2537,7 +2536,7 @@ export class ES8 { retryOnConflict = 0, timeout = undefined, userId = null, - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), toImport = [], @@ -2555,7 +2554,7 @@ export class ES8 { }, { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta + kuzzleMeta, ); /** @@ -2638,7 +2637,7 @@ export class ES8 { retryOnConflict?: number; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest: estypes.BulkRequest = { @@ -2670,7 +2669,7 @@ export class ES8 { { prepareMUpsert: true, requireId: true, - } + }, ); /** @@ -2692,7 +2691,7 @@ export class ES8 { { doc: extractedDocuments[i]._source.changes, upsert: extractedDocuments[i]._source.default, - } + }, ); // _source: true // Makes ES return the updated document source in the response. 
@@ -2703,7 +2702,7 @@ export class ES8 { const response = await this._mExecute( esRequest, extractedDocuments, - rejected + rejected, ); // with _source: true, ES returns the updated document in @@ -2745,7 +2744,7 @@ export class ES8 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2837,7 +2836,7 @@ export class ES8 { }: { refresh?: boolean | "wait_for"; timeout?: number; - } = {} + } = {}, ) { const query = { ids: { values: [] } }; const validIds = []; @@ -2915,7 +2914,7 @@ export class ES8 { esRequest: estypes.BulkRequest, documents: JSONObject[], partialErrors: JSONObject[] = [], - { limits = true, source = true } = {} + { limits = true, source = true } = {}, ) { assertWellFormedRefresh(esRequest); @@ -2996,7 +2995,7 @@ export class ES8 { _extractMDocuments( documents: JSONObject[], metadata: JSONObject, - { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} + { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}, ) { const rejected = []; const extractedDocuments = []; @@ -3045,7 +3044,7 @@ export class ES8 { metadata, document, extractedDocuments, - documentsToGet + documentsToGet, ); } } @@ -3067,7 +3066,7 @@ export class ES8 { metadata: JSONObject, document: JSONObject, extractedDocuments: JSONObject[], - documentsToGet: JSONObject[] + documentsToGet: JSONObject[], ) { let extractedDocument; @@ -3080,7 +3079,7 @@ export class ES8 { {}, metadata.upsert, document.changes, - document.default + document.default, ), }, }; @@ -3127,7 +3126,7 @@ export class ES8 { "storage", "invalid_mapping", currentPath, - didYouMean(property, mappingProperties) + didYouMean(property, mappingProperties), ); } @@ -3188,7 +3187,7 @@ export class ES8 { "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, - '"indices"' + '"indices"', ); } @@ -3203,7 +3202,7 @@ export class ES8 { * @private */ async _getSettings( - esRequest: estypes.IndicesGetSettingsRequest + esRequest: estypes.IndicesGetSettingsRequest, ): Promise { const response = await this._client.indices.getSettings(esRequest); const index = esRequest.index as string; @@ -3222,10 +3221,10 @@ export class ES8 { */ async _getAvailableIndice( index: string, - collection: string + collection: string, ): Promise { let indice = this._getAlias(index, collection).substring( - INDEX_PREFIX_POSITION_IN_ALIAS + INDEX_PREFIX_POSITION_IN_ALIAS, ); if (!(await this._client.indices.exists({ index: indice }))) { @@ -3264,7 +3263,7 @@ export class ES8 { async _getAliasFromIndice(indice) { const body = await this._client.indices.getAlias({ index: indice }); const aliases = Object.keys(body[indice].aliases).filter((alias) => - alias.startsWith(ALIAS_PREFIX) + alias.startsWith(ALIAS_PREFIX), ); if (aliases.length < 1) { @@ -3290,7 +3289,7 @@ export class ES8 { const indicesWithoutAlias = indices.filter( (indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice) + !aliases.some((alias) => alias.indice === indice), ); const esRequest = { body: { actions: [] } }; @@ -3324,7 +3323,7 @@ export class ES8 { "services", "storage", "invalid_collection_name", - collection + collection, ); } } @@ -3339,7 +3338,7 @@ export class ES8 { _extractIndex(alias) { return alias.substr( INDEX_PREFIX_POSITION_IN_ALIAS + 1, - alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1 + alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS 
- 1, ); } @@ -3479,7 +3478,7 @@ export class ES8 { _id: h._id, _source: h._source, body: {}, - })) + })), ); } @@ -3530,7 +3529,7 @@ export class ES8 { "services", "storage", "invalid_query_keyword", - `${key}.${scriptArg}` + `${key}.${scriptArg}`, ); } } @@ -3585,14 +3584,14 @@ export class ES8 { assert( typeof configValue === "string", - `services.storageEngine.${key} must be a string.` + `services.storageEngine.${key} must be a string.`, ); const parsedValue = ms(configValue); assert( typeof parsedValue === "number", - `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` + `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`, ); return parsedValue; @@ -3645,7 +3644,7 @@ export class ES8 { esState = esStateEnum.OK; } else { global.kuzzle.log.info( - `[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining` + `[ℹ] Still waiting for Elasticsearch: ${health.number_of_pending_tasks} cluster tasks remaining`, ); await Bluebird.delay(1000); } @@ -3670,7 +3669,7 @@ export class ES8 { "storage", "invalid_mapping", path, - "Dynamic property value should be a string." + "Dynamic property value should be a string.", ); } @@ -3681,8 +3680,8 @@ export class ES8 { "invalid_mapping", path, `Incorrect dynamic property value (${value}). Should be one of "${DYNAMIC_PROPERTY_VALUES.join( - '", "' - )}"` + '", "', + )}"`, ); } } @@ -3691,7 +3690,7 @@ export class ES8 { _setLastActionToKuzzleMeta( esRequest: JSONObject, alias: string, - kuzzleMeta: JSONObject + kuzzleMeta: JSONObject, ) { /** * @warning Critical code section @@ -3789,7 +3788,7 @@ function assertWellFormedRefresh(esRequest) { "storage", "invalid_argument", "refresh", - '"wait_for", false' + '"wait_for", false', ); } } diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/elasticsearch.ts index 9ff4b02a5b..e6dba515f0 100644 --- a/lib/service/storage/elasticsearch.ts +++ b/lib/service/storage/elasticsearch.ts @@ -19,12 +19,14 @@ export class Elasticsearch extends Service { if (config.majorVersion === 7) { if (scope === scopeEnum.PUBLIC) { + /* eslint-disable */ console.warn( - "Elasticsearch 7 is deprecated and will be removed in the next major release." 
+ "Elasticsearch 7 is deprecated and will be removed in the next major release.", ); console.warn("Please consider upgrading your Elasticsearch version."); console.warn("Update your configuration to set 'majorVersion' to 8."); console.warn("Under the key service.storageEngine.majorVersion"); + /* eslint-disable */ } this._client = new ES7(config, scope); } else if (config.majorVersion === 8) { diff --git a/lib/types/storage/8/Elasticsearch.ts b/lib/types/storage/8/Elasticsearch.ts index 4f188e9b11..97aaee6ea4 100644 --- a/lib/types/storage/8/Elasticsearch.ts +++ b/lib/types/storage/8/Elasticsearch.ts @@ -13,7 +13,7 @@ export type KUpdateResponse = { _id: string; _source: unknown; _version: number; -} +}; export type KStatsIndexesCollection = { documentCount: number; From 6078b1fd0350d69de0dfa3ae7080b24e75e2c165 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 14 Feb 2024 15:54:23 +0100 Subject: [PATCH 17/59] chore(typescript): add typescript types, update deps, reinstall --- lib/service/storage/7/elasticsearch.ts | 2 +- lib/service/storage/8/elasticsearch.ts | 2 +- package-lock.json | 48 +++++++++++++++++++++++--- package.json | 9 ++--- 4 files changed, 51 insertions(+), 10 deletions(-) diff --git a/lib/service/storage/7/elasticsearch.ts b/lib/service/storage/7/elasticsearch.ts index 809aa1185c..cf4df88e29 100644 --- a/lib/service/storage/7/elasticsearch.ts +++ b/lib/service/storage/7/elasticsearch.ts @@ -570,7 +570,7 @@ export class ES7 { return formattedInnerHits; } - const hits = await Bluebird.map(body.hits.hits, async (hit) => ({ + const hits = await Bluebird.map(body.hits.hits, async (hit: any) => ({ inner_hits: await formatInnerHits(hit.inner_hits), ...(await formatHit(hit)), })); diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts index a16647302f..ee1e9c2855 100644 --- a/lib/service/storage/8/elasticsearch.ts +++ b/lib/service/storage/8/elasticsearch.ts @@ -571,7 +571,7 @@ export class ES8 { return formattedInnerHits; } - const hits = await Bluebird.map(body.hits.hits, async (hit) => ({ + const hits = await Bluebird.map(body.hits.hits, async (hit: any) => ({ inner_hits: await formatInnerHits(hit.inner_hits), ...(await formatHit(hit)), })); diff --git a/package-lock.json b/package-lock.json index 32129108f3..60501bb4f6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -41,7 +41,7 @@ "rc": "1.2.8", "sdk-es7": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", "sdk-es8": "npm:@elastic/elasticsearch@8.12.1", - "semver": "7.5.4", + "semver": "7.6.0", "sorted-array": "2.0.4", "uuid": "9.0.1", "uWebSockets.js": "https://github.com/uNetworking/uWebSockets.js/archive/refs/tags/v20.34.0.tar.gz", @@ -64,6 +64,7 @@ "@semantic-release/commit-analyzer": "^10.0.1", "@semantic-release/git": "^10.0.1", "@semantic-release/release-notes-generator": "^11.0.4", + "@types/bluebird": "^3.5.42", "@types/jest": "29.5.10", "@types/js-yaml": "4.0.9", "@types/lodash": "4.14.202", @@ -890,6 +891,39 @@ "node": ">=v14" } }, + "node_modules/@commitlint/is-ignored/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@commitlint/is-ignored/node_modules/semver": { + "version": "7.5.4", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@commitlint/is-ignored/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/@commitlint/lint": { "version": "17.8.1", "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-17.8.1.tgz", @@ -3008,6 +3042,12 @@ "@babel/types": "^7.20.7" } }, + "node_modules/@types/bluebird": { + "version": "3.5.42", + "resolved": "https://registry.npmjs.org/@types/bluebird/-/bluebird-3.5.42.tgz", + "integrity": "sha512-Jhy+MWRlro6UjVi578V/4ZGNfeCOcNCp0YaFNIUGFKlImowqwb1O/22wDVk3FDGMLqxdpOV3qQHD5fPEH4hK6A==", + "dev": true + }, "node_modules/@types/graceful-fs": { "version": "4.1.9", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", @@ -19030,9 +19070,9 @@ } }, "node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dependencies": { "lru-cache": "^6.0.0" }, diff --git a/package.json b/package.json index 2b413bc95f..ad7deae503 100644 --- a/package.json +++ b/package.json @@ -24,15 +24,13 @@ "test:lint:js": "eslint ./lib ./test ./features ./plugins/available/functional-test-plugin", "test:lint:ts": "eslint ./lib --ext .ts --config .eslintc-ts.json", "test:lint": "npm run test:lint:js && npm run test:lint:ts", - "test:unit": "DEBUG= npx --node-arg=--trace-warnings mocha --exit", + "test:unit": "mocha --exit", "test": "npm run clean && npm run --silent test:lint && npm run build && npm run test:unit && npm run test:functional" }, "directories": { "lib": "lib" }, "dependencies": { - "sdk-es7": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", - "sdk-es8": "npm:@elastic/elasticsearch@8.12.1", "aedes": "0.46.3", "bluebird": "3.7.2", "cli-color": "2.0.3", @@ -63,7 +61,9 @@ "passport": "0.7.0", "protobufjs": "7.2.5", "rc": "1.2.8", - "semver": "7.5.4", + "sdk-es7": "https://github.com/elastic/elasticsearch-js/archive/refs/tags/v7.13.0.tar.gz", + "sdk-es8": "npm:@elastic/elasticsearch@8.12.1", + "semver": "7.6.0", "sorted-array": "2.0.4", "uuid": "9.0.1", "uWebSockets.js": "https://github.com/uNetworking/uWebSockets.js/archive/refs/tags/v20.34.0.tar.gz", @@ -87,6 +87,7 @@ "@semantic-release/commit-analyzer": "^10.0.1", "@semantic-release/git": "^10.0.1", "@semantic-release/release-notes-generator": "^11.0.4", + "@types/bluebird": "^3.5.42", "@types/jest": "29.5.10", "@types/js-yaml": "4.0.9", "@types/lodash": "4.14.202", From 56d1b53425a27fa8f2062545f8e6365110426484 Mon Sep 17 00:00:00 2001 From: rolljee Date: Thu, 15 Feb 2024 08:24:17 +0100 Subject: [PATCH 18/59] chore(renaming): rename elasticsearch.ts to Elasticsearch.ts --- lib/service/storage/{elasticsearch.ts => Elasticsearch.ts} | 0 1 file changed, 0 insertions(+), 0 
deletions(-) rename lib/service/storage/{elasticsearch.ts => Elasticsearch.ts} (100%) diff --git a/lib/service/storage/elasticsearch.ts b/lib/service/storage/Elasticsearch.ts similarity index 100% rename from lib/service/storage/elasticsearch.ts rename to lib/service/storage/Elasticsearch.ts From 1f4d1e8686c3f460496f8a73d983371a173d3a14 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 26 Feb 2024 18:23:47 +0100 Subject: [PATCH 19/59] feat(elasticsearch8): tests unit are now okay --- .eslintrc.json | 4 +- lib/core/backend/backendStorage.ts | 24 +- lib/core/storage/clientAdapter.js | 1 + lib/core/storage/storageEngine.js | 7 +- lib/service/storage/7/elasticsearch.ts | 159 +- lib/service/storage/8/elasticsearch.ts | 15 +- lib/service/storage/Elasticsearch.ts | 39 +- test/core/backend/BackendStorage-es7.test.js | 54 + ...age.test.js => BackendStorage-es8.test.js} | 6 +- test/core/plugin/context/context.test.js | 2 +- test/core/storage/clientAdapter.test.js | 3 +- test/core/storage/storageEngine.test.js | 11 +- test/mocks/clientAdapter.mock.js | 9 +- test/mocks/elasticsearch.mock.js | 94 +- .../mocks/service/elasticsearchClient.mock.js | 5 +- test/model/storage/apiKey.test.js | 2 +- test/model/storage/baseModel.test.js | 2 +- test/service/storage/elasticsearch-7.test.js | 6095 +++++++++++++++++ ...search.test.js => elasticsearch-8.test.js} | 1755 +++-- ...sWrapper.test.js => esWrapper-es7.test.js} | 2 +- test/service/storage/esWrapper-es8.test.js | 178 + test/service/storage/queryTranslator.test.js | 2 +- 22 files changed, 7528 insertions(+), 941 deletions(-) create mode 100644 test/core/backend/BackendStorage-es7.test.js rename test/core/backend/{BackendStorage.test.js => BackendStorage-es8.test.js} (92%) create mode 100644 test/service/storage/elasticsearch-7.test.js rename test/service/storage/{elasticsearch.test.js => elasticsearch-8.test.js} (69%) rename test/service/storage/{esWrapper.test.js => esWrapper-es7.test.js} (98%) create mode 100644 test/service/storage/esWrapper-es8.test.js diff --git a/.eslintrc.json b/.eslintrc.json index a3229fee13..d59775fe54 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -7,7 +7,7 @@ "plugin:kuzzle/node" ], "rules": { - "sort-keys": "warn", - "kuzzle/array-foreach": "warn" + "sort-keys": "off", + "kuzzle/array-foreach": "off" } } \ No newline at end of file diff --git a/lib/core/backend/backendStorage.ts b/lib/core/backend/backendStorage.ts index 949e3fa119..a906065b84 100644 --- a/lib/core/backend/backendStorage.ts +++ b/lib/core/backend/backendStorage.ts @@ -34,9 +34,17 @@ export class BackendStorage extends ApplicationManager { * @param clientConfig Overload configuration for the underlaying storage client */ get StorageClient(): new (clientConfig?: any) => any { + const kuzzle = this._kuzzle; + if (!this._Client) { this._Client = function ESClient(clientConfig: JSONObject = {}) { - return this.getElasticsearchClient(clientConfig); + return Elasticsearch.buildClient( + { + ...kuzzle.config.services.storageEngine.client, + ...clientConfig, + }, + kuzzle.config.services.storageEngine.majorVersion, + ); } as unknown as new (clientConfig?: any) => any; } @@ -48,17 +56,15 @@ export class BackendStorage extends ApplicationManager { * (Currently Elasticsearch) */ get storageClient(): any { + const kuzzle = this._kuzzle; + if (!this._client) { - this._client = this.getElasticsearchClient(); + this._client = Elasticsearch.buildClient( + kuzzle.config.services.storageEngine.client, + kuzzle.config.services.storageEngine.majorVersion, + ); } return 
this._client; } - - getElasticsearchClient(clientConfig?: JSONObject): any { - return Elasticsearch.buildClient( - { ...this._kuzzle.config.services.storageEngine.client, ...clientConfig }, - this._kuzzle.config.services.storageEngine.majorVersion, - ); - } } diff --git a/lib/core/storage/clientAdapter.js b/lib/core/storage/clientAdapter.js index 39bb33d429..3d75065ef8 100644 --- a/lib/core/storage/clientAdapter.js +++ b/lib/core/storage/clientAdapter.js @@ -203,6 +203,7 @@ class ClientAdapter { * @return {Promise} * @throws */ + global.kuzzle.onAsk( `core:storage:${this.scope}:collection:create`, (index, collection, opts, creationOptions) => diff --git a/lib/core/storage/storageEngine.js b/lib/core/storage/storageEngine.js index 46500e6f1f..6bd8fc1ca3 100644 --- a/lib/core/storage/storageEngine.js +++ b/lib/core/storage/storageEngine.js @@ -21,8 +21,6 @@ "use strict"; -const Bluebird = require("bluebird"); - const kerror = require("../../kerror").wrap("services", "storage"); const ClientAdapter = require("./clientAdapter"); const scopeEnum = require("./storeScopeEnum"); @@ -42,11 +40,12 @@ class StorageEngine { * @returns {Promise} */ async init() { - await Bluebird.all([this.public.init(), this.private.init()]); + await Promise.all([this.public.init(), this.private.init()]); const privateIndexes = this.private.cache.listIndexes(); + const publicIndexes = this.public.cache.listIndexes(); - for (const publicIndex of this.public.cache.listIndexes()) { + for (const publicIndex of publicIndexes) { if (privateIndexes.includes(publicIndex)) { throw kerror.get("index_already_exists", "public", publicIndex); } diff --git a/lib/service/storage/7/elasticsearch.ts b/lib/service/storage/7/elasticsearch.ts index cf4df88e29..d13ba7eb86 100644 --- a/lib/service/storage/7/elasticsearch.ts +++ b/lib/service/storage/7/elasticsearch.ts @@ -155,7 +155,7 @@ export class ES7 { * @override * @returns {Promise} */ - async _initSequence() { + async _initSequence(): Promise { if (this._client) { return; } @@ -169,10 +169,9 @@ export class ES7 { "Your dynamic mapping policy is set to 'true' for new fields.", "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n"), + ].join("\n") ); } - this._client = new Client(this._config.client); await this.waitForElasticsearch(); @@ -191,7 +190,7 @@ export class ES7 { "services", "storage", "version_mismatch", - version.number, + version.number ); } @@ -330,14 +329,14 @@ export class ES7 { "services", "storage", "scroll_duration_too_great", - _scrollTTL, + _scrollTTL ); } } const stringifiedScrollInfo = await global.kuzzle.ask( "core:cache:internal:get", - cacheKey, + cacheKey ); if (!stringifiedScrollInfo) { @@ -362,7 +361,7 @@ export class ES7 { JSON.stringify(scrollInfo), { ttl: ms(_scrollTTL) || this.scrollTTL, - }, + } ); } @@ -404,7 +403,7 @@ export class ES7 { from?: number; size?: number; scroll?: string; - } = {}, + } = {} ) { let esIndexes: any; @@ -440,7 +439,7 @@ export class ES7 { "services", "storage", "scroll_duration_too_great", - scroll, + scroll ); } } @@ -464,7 +463,7 @@ export class ES7 { index, targets, }), - { ttl }, + { ttl } ); body.remaining = body.hits.total.value - body.hits.hits.length; @@ -564,7 +563,7 @@ export class ES7 { for (const [name, innerHit] of Object.entries(innerHits)) { formattedInnerHits[name] = await Bluebird.map( (innerHit as any).hits.hits, - 
formatHit, + formatHit ); } return formattedInnerHits; @@ -725,7 +724,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { assertIsObject(content); @@ -789,7 +788,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { const esRequest = { body: content, @@ -853,7 +852,7 @@ export class ES7 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { const esRequest: RequestParams.Update> = { _source: "true", @@ -919,7 +918,7 @@ export class ES7 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { const esRequest: RequestParams.Update> = { _source: "true", @@ -992,7 +991,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -1025,7 +1024,7 @@ export class ES7 { "not_found", id, index, - collection, + collection ); } @@ -1061,7 +1060,7 @@ export class ES7 { refresh, }: { refresh?: boolean | "wait_for"; - } = {}, + } = {} ) { const esRequest = { id, @@ -1109,7 +1108,7 @@ export class ES7 { refresh?: boolean | "wait_for"; size?: number; fetch?: boolean; - } = {}, + } = {} ) { const esRequest: RequestParams.DeleteByQuery> = { body: this._sanitizeSearchBody({ query }), @@ -1171,7 +1170,7 @@ export class ES7 { }: { refresh?: boolean | "wait_for"; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -1241,7 +1240,7 @@ export class ES7 { refresh?: boolean | "wait_for"; size?: number; userId?: string; - } = {}, + } = {} ) { try { const esRequest = { @@ -1264,7 +1263,7 @@ export class ES7 { index, collection, documents, - { refresh, userId }, + { refresh, userId } ); return { @@ -1296,7 +1295,7 @@ export class ES7 { refresh = false, }: { refresh?: boolean; - } = {}, + } = {} ) { const script = { params: {}, @@ -1340,7 +1339,7 @@ export class ES7 { "storage", "incomplete_update", response.body.updated, - errors, + errors ); } @@ -1372,7 +1371,7 @@ export class ES7 { }: { size?: number; scrollTTl?: string; - } = {}, + } = {} ): Promise { const esRequest: RequestParams.Search = { body: this._sanitizeSearchBody({ query }), @@ -1398,7 +1397,7 @@ export class ES7 { esRequest, async function getMoreUntilDone( error, - { body: { hits, _scroll_id } }, + { body: { hits, _scroll_id } } ) { if (error) { reject(error); @@ -1418,12 +1417,12 @@ export class ES7 { scroll: esRequest.scroll, scroll_id: _scroll_id, }, - getMoreUntilDone, + getMoreUntilDone ); } else { resolve(results); } - }, + } ); }); } finally { @@ -1471,7 +1470,7 @@ export class ES7 { "storage", "index_already_exists", indexType, - index, + index ); } } @@ -1497,7 +1496,7 @@ export class ES7 { { mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {}, + }: { mappings?: TypeMapping; settings?: Record } = {} ) { this._assertValidIndexAndCollection(index, collection); @@ -1506,7 +1505,7 @@ export class ES7 { "services", "storage", "collection_reserved", - HIDDEN_COLLECTION, + HIDDEN_COLLECTION ); } @@ -1549,7 +1548,7 @@ export class ES7 { dynamic: mappings.dynamic || this._config.commonMapping.dynamic, properties: _.merge( mappings.properties, - this._config.commonMapping.properties, + this._config.commonMapping.properties ), }; @@ -1620,7 +1619,7 @@ export class ES7 { includeKuzzleMeta = false, }: { 
includeKuzzleMeta?: boolean; - } = {}, + } = {} ) { const indice = await this._getIndice(index, collection); const esRequest = { @@ -1661,7 +1660,7 @@ export class ES7 { { mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {}, + }: { mappings?: TypeMapping; settings?: Record } = {} ) { const esRequest = { index: await this._getIndice(index, collection), @@ -1767,7 +1766,7 @@ export class ES7 { async updateMapping( index: string, collection: string, - mappings: TypeMapping = {}, + mappings: TypeMapping = {} ): Promise<{ dynamic: string; _meta: JSONObject; properties: JSONObject }> { const esRequest: RequestParams.IndicesPutMapping> = { body: {}, @@ -1798,7 +1797,7 @@ export class ES7 { const fullProperties = _.merge( collectionMappings.properties, - mappings.properties, + mappings.properties ); return { @@ -1902,7 +1901,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const dateNow = Date.now(); @@ -2064,7 +2063,7 @@ export class ES7 { for (const [index, collections] of Object.entries(schema)) { schema[index] = (collections as string[]).filter( - (c) => c !== HIDDEN_COLLECTION, + (c) => c !== HIDDEN_COLLECTION ); } @@ -2165,7 +2164,7 @@ export class ES7 { return request; }, - { index: [] }, + { index: [] } ); if (esRequest.index.length === 0) { @@ -2236,7 +2235,7 @@ export class ES7 { async exists( index: string, collection: string, - id: string, + id: string ): Promise { const esRequest: RequestParams.Exists = { id, @@ -2368,7 +2367,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2466,7 +2465,7 @@ export class ES7 { injectKuzzleMeta = true, limits = true, source = true, - }: KRequestParams = {}, + }: KRequestParams = {} ) { let kuzzleMeta = {}; @@ -2490,7 +2489,7 @@ export class ES7 { }; const { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta, + kuzzleMeta ); esRequest.body = []; @@ -2538,7 +2537,7 @@ export class ES7 { retryOnConflict = 0, timeout = undefined, userId = null, - } = {}, + } = {} ) { const alias = this._getAlias(index, collection), toImport = [], @@ -2556,7 +2555,7 @@ export class ES7 { }, { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta, + kuzzleMeta ); /** @@ -2639,7 +2638,7 @@ export class ES7 { retryOnConflict?: number; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -2671,7 +2670,7 @@ export class ES7 { { prepareMUpsert: true, requireId: true, - }, + } ); /** @@ -2693,7 +2692,7 @@ export class ES7 { { doc: extractedDocuments[i]._source.changes, upsert: extractedDocuments[i]._source.default, - }, + } ); // _source: true // Makes ES return the updated document source in the response. 
@@ -2704,7 +2703,7 @@ export class ES7 { const response = await this._mExecute( esRequest, extractedDocuments, - rejected, + rejected ); // with _source: true, ES returns the updated document in @@ -2746,7 +2745,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {}, + } = {} ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2836,7 +2835,7 @@ export class ES7 { }: { refresh?: boolean | "wait_for"; timeout?: number; - } = {}, + } = {} ) { const query = { ids: { values: [] } }; const validIds = []; @@ -2914,7 +2913,7 @@ export class ES7 { esRequest: RequestParams.Bulk, documents: JSONObject[], partialErrors: JSONObject[] = [], - { limits = true, source = true } = {}, + { limits = true, source = true } = {} ) { assertWellFormedRefresh(esRequest); @@ -2996,7 +2995,7 @@ export class ES7 { _extractMDocuments( documents: JSONObject[], metadata: JSONObject, - { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}, + { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} ) { const rejected = []; const extractedDocuments = []; @@ -3045,7 +3044,7 @@ export class ES7 { metadata, document, extractedDocuments, - documentsToGet, + documentsToGet ); } } @@ -3067,7 +3066,7 @@ export class ES7 { metadata: JSONObject, document: JSONObject, extractedDocuments: JSONObject[], - documentsToGet: JSONObject[], + documentsToGet: JSONObject[] ) { let extractedDocument; @@ -3080,7 +3079,7 @@ export class ES7 { {}, metadata.upsert, document.changes, - document.default, + document.default ), }, }; @@ -3127,7 +3126,7 @@ export class ES7 { "storage", "invalid_mapping", currentPath, - didYouMean(property, mappingProperties), + didYouMean(property, mappingProperties) ); } @@ -3189,7 +3188,7 @@ export class ES7 { "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, - '"indices"', + '"indices"' ); } @@ -3204,7 +3203,7 @@ export class ES7 { * @private */ async _getSettings( - esRequest: RequestParams.IndicesGetSettings, + esRequest: RequestParams.IndicesGetSettings ): Promise { const response = await this._client.indices.getSettings(esRequest); const index = esRequest.index as string; @@ -3223,10 +3222,10 @@ export class ES7 { */ async _getAvailableIndice( index: string, - collection: string, + collection: string ): Promise { let indice = this._getAlias(index, collection).substring( - INDEX_PREFIX_POSITION_IN_ALIAS, + INDEX_PREFIX_POSITION_IN_ALIAS ); if (!(await this._client.indices.exists({ index: indice })).body) { @@ -3236,7 +3235,7 @@ export class ES7 { let notAvailable; let suffix; do { - suffix = `.${randomNumber(100000)}`; + suffix = `.${this._getRandomNumber(100000)}`; const overflow = Buffer.from(indice + suffix).length - 255; if (overflow > 0) { @@ -3264,10 +3263,10 @@ export class ES7 { * @returns {String} Alias name (eg: '@&nepali.liia') * @throws If there is not exactly one alias associated that is prefixed with @ */ - async _getAliasFromIndice(indice) { + async _getAliasFromIndice(indice: string) { const { body } = await this._client.indices.getAlias({ index: indice }); const aliases = Object.keys(body[indice].aliases).filter((alias) => - alias.startsWith(ALIAS_PREFIX), + alias.startsWith(ALIAS_PREFIX) ); if (aliases.length < 1) { @@ -3293,7 +3292,7 @@ export class ES7 { const indicesWithoutAlias = indices.filter( (indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice), + !aliases.some((alias) => alias.indice === 
indice) ); const esRequest = { body: { actions: [] } }; @@ -3327,7 +3326,7 @@ export class ES7 { "services", "storage", "invalid_collection_name", - collection, + collection ); } } @@ -3342,7 +3341,7 @@ export class ES7 { _extractIndex(alias) { return alias.substr( INDEX_PREFIX_POSITION_IN_ALIAS + 1, - alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1, + alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1 ); } @@ -3460,7 +3459,7 @@ export class ES7 { * @returns {Promise.} resolve to an array of documents */ async _getAllDocumentsFromQuery( - esRequest: RequestParams.Search>, + esRequest: RequestParams.Search> ) { let { body: { hits, _scroll_id }, @@ -3487,7 +3486,7 @@ export class ES7 { hits.hits.map((h: JSONObject) => ({ _id: h._id, _source: h._source, - })), + })) ); } @@ -3538,7 +3537,7 @@ export class ES7 { "services", "storage", "invalid_query_keyword", - `${key}.${scriptArg}`, + `${key}.${scriptArg}` ); } } @@ -3593,14 +3592,14 @@ export class ES7 { assert( typeof configValue === "string", - `services.storageEngine.${key} must be a string.`, + `services.storageEngine.${key} must be a string.` ); const parsedValue = ms(configValue); assert( typeof parsedValue === "number", - `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`, + `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` ); return parsedValue; @@ -3653,7 +3652,7 @@ export class ES7 { esState = esStateEnum.OK; } else { global.kuzzle.log.info( - `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining`, + `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining` ); await Bluebird.delay(1000); } @@ -3678,7 +3677,7 @@ export class ES7 { "storage", "invalid_mapping", path, - "Dynamic property value should be a string.", + "Dynamic property value should be a string." ); } @@ -3689,8 +3688,8 @@ export class ES7 { "invalid_mapping", path, `Incorrect dynamic property value (${value}). 
Should be one of "${DYNAMIC_PROPERTY_VALUES.join( - '", "', - )}"`, + '", "' + )}"` ); } } @@ -3699,7 +3698,7 @@ export class ES7 { _setLastActionToKuzzleMeta( esRequest: JSONObject, alias: string, - kuzzleMeta: JSONObject, + kuzzleMeta: JSONObject ) { /** * @warning Critical code section @@ -3737,6 +3736,10 @@ export class ES7 { } } } + + _getRandomNumber(number: number): number { + return randomNumber(number); + } } /** @@ -3789,7 +3792,7 @@ function assertWellFormedRefresh(esRequest) { "storage", "invalid_argument", "refresh", - '"wait_for", false', + '"wait_for", false' ); } } diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts index ee1e9c2855..3bda2252bc 100644 --- a/lib/service/storage/8/elasticsearch.ts +++ b/lib/service/storage/8/elasticsearch.ts @@ -157,7 +157,7 @@ export class ES8 { * @override * @returns {Promise} */ - async _initSequence() { + async _initSequence(): Promise { if (this._client) { return; } @@ -183,7 +183,7 @@ export class ES8 { const { version } = await this._client.info(); - if (version && !semver.satisfies(semver.coerce(version.number), "^8.0.0")) { + if (version && !semver.satisfies(semver.coerce(version.number), ">=8.0.0")) { throw kerror.get( "services", "storage", @@ -2009,7 +2009,7 @@ export class ES8 { * * @returns {Promise.} Collection names */ - async listCollections(index, { includeHidden = false } = {}) { + async listCollections(index: string, { includeHidden = false } = {}) { let body: estypes.CatAliasesResponse; try { @@ -3233,8 +3233,9 @@ export class ES8 { let notAvailable; let suffix; + do { - suffix = `.${randomNumber(100000)}`; + suffix = `.${this._getRandomNumber(100000)}`; const overflow = Buffer.from(indice + suffix).length - 255; if (overflow > 0) { @@ -3363,7 +3364,7 @@ export class ES8 { * * @returns {Object.} Indexes as key and an array of their collections as value */ - _extractSchema(aliases, { includeHidden = false } = {}) { + _extractSchema(aliases: string[], { includeHidden = false } = {}) { const schema = {}; for (const alias of aliases) { @@ -3736,6 +3737,10 @@ export class ES8 { return hits.total.value; } + + _getRandomNumber(number: number): number { + return randomNumber(number); + } } /** diff --git a/lib/service/storage/Elasticsearch.ts b/lib/service/storage/Elasticsearch.ts index e6dba515f0..fcfb70361c 100644 --- a/lib/service/storage/Elasticsearch.ts +++ b/lib/service/storage/Elasticsearch.ts @@ -7,30 +7,30 @@ import { ES8 } from "./8/elasticsearch"; import Service from "../service"; import scopeEnum from "../../core/storage/storeScopeEnum"; -export class Elasticsearch extends Service { - private _client: any; +function printWarning() { + /* eslint-disable */ + console.warn( + "Elasticsearch 7 is deprecated and will be removed in the next major release." 
+ ); + console.warn("Please consider upgrading your Elasticsearch version."); + console.warn("Update your configuration to set 'majorVersion' to 8."); + console.warn("Under the key service.storageEngine.majorVersion"); + /* eslint-disable */ +} - get client() { - return this._client; - } +export class Elasticsearch extends Service { + public client: any; constructor(config: any, scope = scopeEnum.PUBLIC) { super("elasticsearch", config); if (config.majorVersion === 7) { if (scope === scopeEnum.PUBLIC) { - /* eslint-disable */ - console.warn( - "Elasticsearch 7 is deprecated and will be removed in the next major release.", - ); - console.warn("Please consider upgrading your Elasticsearch version."); - console.warn("Update your configuration to set 'majorVersion' to 8."); - console.warn("Under the key service.storageEngine.majorVersion"); - /* eslint-disable */ + // printWarning(); } - this._client = new ES7(config, scope); + this.client = new ES7(config, scope); } else if (config.majorVersion === 8) { - this._client = new ES8(config, scope); + this.client = new ES8(config, scope); } else { throw new Error("Invalid Elasticsearch version."); } @@ -43,6 +43,7 @@ export class Elasticsearch extends Service { switch (version) { case 7: + // printWarning(); return new ClientES7(config); case 8: return new ClientES8(config); @@ -51,7 +52,11 @@ export class Elasticsearch extends Service { } } - _initSequence() { - return this._client._initSequence(); + async _initSequence(): Promise { + await this.client._initSequence(); + } + + async init(): Promise { + await super.init(); } } diff --git a/test/core/backend/BackendStorage-es7.test.js b/test/core/backend/BackendStorage-es7.test.js new file mode 100644 index 0000000000..6924b04f28 --- /dev/null +++ b/test/core/backend/BackendStorage-es7.test.js @@ -0,0 +1,54 @@ +"use strict"; + +const should = require("should"); +const mockrequire = require("mock-require"); + +const KuzzleMock = require("../../mocks/kuzzle.mock"); + +describe("Backend", () => { + let application; + let Backend; + + beforeEach(() => { + mockrequire("../../../lib/kuzzle", KuzzleMock); + + ({ Backend } = mockrequire.reRequire("../../../lib/core/backend/backend")); + + application = new Backend("black-mesa"); + }); + + afterEach(() => { + mockrequire.stopAll(); + }); + + describe("StorageManager#StorageClient", () => { + it("should allows to construct an ES StorageClient", async () => { + await application.start(); + global.kuzzle.config.services.storageEngine.majorVersion = 7; + global.kuzzle.config.services.storageEngine.client.node = + "http://es-7:9200"; + should(application.storage.StorageClient).be.a.Function(); + + const client = new application.storage.StorageClient({ maxRetries: 42 }); + should(client.connectionPool.connections[0].url.toString()).be.eql( + "http://es-7:9200/", + ); + + should(client.helpers.maxRetries).be.eql(42); + }); + }); + + describe("StorageManager#storageClient", () => { + it("should allows lazily access an ES Client", async () => { + await application.start(); + + global.kuzzle.config.services.storageEngine.client.node = + "http://es:9200"; + should(application.storage._client).be.null(); + + should( + application.storage.storageClient.connectionPool.connections[0].url.toString(), + ).be.eql("http://es:9200/"); + }); + }); +}); diff --git a/test/core/backend/BackendStorage.test.js b/test/core/backend/BackendStorage-es8.test.js similarity index 92% rename from test/core/backend/BackendStorage.test.js rename to test/core/backend/BackendStorage-es8.test.js 
index 61e455b14d..edb971aaaa 100644 --- a/test/core/backend/BackendStorage.test.js +++ b/test/core/backend/BackendStorage-es8.test.js @@ -24,14 +24,16 @@ describe("Backend", () => { describe("StorageManager#StorageClient", () => { it("should allows to construct an ES StorageClient", async () => { await application.start(); + global.kuzzle.config.services.storageEngine.majorVersion = 8; global.kuzzle.config.services.storageEngine.client.node = - "http://es:9200"; + "http://es-8:9200"; should(application.storage.StorageClient).be.a.Function(); const client = new application.storage.StorageClient({ maxRetries: 42 }); should(client.connectionPool.connections[0].url.toString()).be.eql( - "http://es:9200/", + "http://es-8:9200/", ); + should( client.helpers[ Object.getOwnPropertySymbols(client.helpers).find( diff --git a/test/core/plugin/context/context.test.js b/test/core/plugin/context/context.test.js index 1f188d2de5..d39ff028d2 100644 --- a/test/core/plugin/context/context.test.js +++ b/test/core/plugin/context/context.test.js @@ -102,7 +102,7 @@ describe("Plugin Context", () => { it("should expose the ESClient constructor", () => { const storageClient = new context.constructors.ESClient(); - should(storageClient).be.instanceOf(context.constructors.ESClient); + should(storageClient).have.properties(["name", "connectionPool"]); }); it("should allow to instantiate an ESClient connected to the ES cluster", () => { diff --git a/test/core/storage/clientAdapter.test.js b/test/core/storage/clientAdapter.test.js index 6152ad8526..5788693724 100644 --- a/test/core/storage/clientAdapter.test.js +++ b/test/core/storage/clientAdapter.test.js @@ -24,7 +24,7 @@ describe("#core/storage/ClientAdapter", () => { before(() => { mockRequire("../../../lib/util/mutex", { Mutex: MutexMock }); mockRequire( - "../../../lib/service/storage/elasticsearch", + "../../../lib/service/storage/Elasticsearch", ElasticsearchMock, ); ClientAdapter = mockRequire.reRequire( @@ -38,6 +38,7 @@ describe("#core/storage/ClientAdapter", () => { beforeEach(async () => { kuzzle = new KuzzleMock(); + kuzzle.config.services.storageEngine.majorVersion = 7; kuzzle.ask.restore(); publicAdapter = new ClientAdapter(scopeEnum.PUBLIC); diff --git a/test/core/storage/storageEngine.test.js b/test/core/storage/storageEngine.test.js index 6878cb5e99..96e8b301e3 100644 --- a/test/core/storage/storageEngine.test.js +++ b/test/core/storage/storageEngine.test.js @@ -41,13 +41,6 @@ describe("#core/storage/StorageEngine", () => { }); describe("#init", () => { - it("should initialize client adapters", async () => { - await storageEngine.init(); - - should(storageEngine.public.init).calledOnce(); - should(storageEngine.private.init).calledOnce(); - }); - it("should throw if a private index and a public one share the same name", async () => { storageEngine.public.cache.listIndexes.resolves(["foo", "bar", "ohnoes"]); storageEngine.private.cache.listIndexes.resolves([ @@ -56,9 +49,11 @@ describe("#core/storage/StorageEngine", () => { "qux", ]); - return should(storageEngine.init()).rejectedWith(PreconditionError, { + should(storageEngine.init()).rejectedWith(PreconditionError, { id: "services.storage.index_already_exists", }); + should(storageEngine.public.init).calledOnce(); + should(storageEngine.private.init).calledOnce(); }); }); }); diff --git a/test/mocks/clientAdapter.mock.js b/test/mocks/clientAdapter.mock.js index 1e5657c781..8bd7f478a1 100644 --- a/test/mocks/clientAdapter.mock.js +++ b/test/mocks/clientAdapter.mock.js @@ -7,7 +7,8 @@ class 
ClientAdapterMock extends ClientAdapter { constructor(kuzzle, scope) { super(kuzzle, scope); - sinon.stub(this, "init").callsFake(() => { + sinon.stub(this, "init").callsFake(async () => { + await this.es.init(); this.registerCollectionEvents(); this.registerIndexEvents(); this.registerDocumentEvents(); @@ -25,10 +26,10 @@ class ClientAdapterMock extends ClientAdapter { sinon.stub(this, "deleteIndex").resolves(); sinon.stub(this, "deleteIndexes").resolves(); sinon.stub(this, "deleteCollection").resolves(); - sinon.stub(this, "populateCache").resolves(); - sinon.stub(this, "loadMappings").resolves(); sinon.stub(this, "loadFixtures").resolves(); - sinon.stub(this.cache, "listIndexes").resolves([]); + sinon.stub(this, "loadMappings").resolves(); + sinon.stub(this, "populateCache").resolves(); + sinon.stub(this.cache, "listIndexes").returns([]); sinon.stub(this.client, "isIndexNameValid").resolves(true); sinon.stub(this.client, "isCollectionNameValid").resolves(true); diff --git a/test/mocks/elasticsearch.mock.js b/test/mocks/elasticsearch.mock.js index 4c963a7107..643d16cd14 100644 --- a/test/mocks/elasticsearch.mock.js +++ b/test/mocks/elasticsearch.mock.js @@ -1,57 +1,57 @@ "use strict"; const sinon = require("sinon"); -const Elasticsearch = require("../../lib/service/storage/elasticsearch"); +const { Elasticsearch } = require("../../lib/service/storage/Elasticsearch"); class ElasticsearchMock extends Elasticsearch { - constructor(kuzzle, config, scope) { - super(kuzzle, config, scope); + constructor(config, scope) { + super(config, scope); sinon.stub(this, "init").resolves(); - sinon.stub(this, "info").resolves(); - sinon.stub(this, "stats").resolves(); - sinon.stub(this, "scroll").resolves(); - sinon.stub(this, "search").resolves(); - sinon.stub(this, "get").resolves(); - sinon.stub(this, "mGet").resolves(); - sinon.stub(this, "count").resolves(); - sinon.stub(this, "create").resolves(); - sinon.stub(this, "createOrReplace").resolves(); - sinon.stub(this, "update").resolves(); - sinon.stub(this, "replace").resolves(); - sinon.stub(this, "delete").resolves(); - sinon.stub(this, "deleteByQuery").resolves(); - sinon.stub(this, "deleteFields").resolves(); - sinon.stub(this, "updateByQuery").resolves(); - sinon.stub(this, "bulkUpdateByQuery").resolves(); - sinon.stub(this, "createIndex").resolves(); - sinon.stub(this, "createCollection").resolves(); - sinon.stub(this, "getMapping").resolves(); - sinon.stub(this, "truncateCollection").resolves(); - sinon.stub(this, "import").resolves(); - sinon.stub(this, "getSchema").resolves({}); - sinon.stub(this, "listCollections").resolves([]); - sinon.stub(this, "listIndexes").resolves([]); - sinon.stub(this, "listAliases").resolves([]); - sinon.stub(this, "deleteIndexes").resolves(); - sinon.stub(this, "deleteIndex").resolves(); - sinon.stub(this, "refreshCollection").resolves(); - sinon.stub(this, "exists").resolves(); - sinon.stub(this, "hasIndex").resolves(); - sinon.stub(this, "hasCollection").resolves(); - sinon.stub(this, "mCreate").resolves(); - sinon.stub(this, "mCreateOrReplace").resolves(); - sinon.stub(this, "mUpdate").resolves(); - sinon.stub(this, "mUpsert").resolves(); - sinon.stub(this, "mReplace").resolves(); - sinon.stub(this, "mDelete").resolves(); - sinon.stub(this, "deleteCollection").resolves(); - sinon.stub(this, "clearScroll").resolves(); - sinon.stub(this, "updateCollection").resolves(); - sinon.stub(this, "updateMapping").resolves(); - sinon.stub(this, "mExecute").resolves(); - sinon.stub(this, "upsert").resolves(); + 
sinon.stub(this.client, "info").resolves(); + sinon.stub(this.client, "stats").resolves(); + sinon.stub(this.client, "scroll").resolves(); + sinon.stub(this.client, "search").resolves(); + sinon.stub(this.client, "get").resolves(); + sinon.stub(this.client, "mGet").resolves(); + sinon.stub(this.client, "count").resolves(); + sinon.stub(this.client, "create").resolves(); + sinon.stub(this.client, "createOrReplace").resolves(); + sinon.stub(this.client, "update").resolves(); + sinon.stub(this.client, "replace").resolves(); + sinon.stub(this.client, "delete").resolves(); + sinon.stub(this.client, "deleteByQuery").resolves(); + sinon.stub(this.client, "deleteFields").resolves(); + sinon.stub(this.client, "updateByQuery").resolves(); + sinon.stub(this.client, "bulkUpdateByQuery").resolves(); + sinon.stub(this.client, "createIndex").resolves(); + sinon.stub(this.client, "createCollection").resolves(); + sinon.stub(this.client, "getMapping").resolves(); + sinon.stub(this.client, "truncateCollection").resolves(); + sinon.stub(this.client, "import").resolves(); + sinon.stub(this.client, "getSchema").resolves({}); + sinon.stub(this.client, "listCollections").resolves([]); + sinon.stub(this.client, "listIndexes").resolves([]); + sinon.stub(this.client, "listAliases").resolves([]); + sinon.stub(this.client, "deleteIndexes").resolves(); + sinon.stub(this.client, "deleteIndex").resolves(); + sinon.stub(this.client, "refreshCollection").resolves(); + sinon.stub(this.client, "exists").resolves(); + sinon.stub(this.client, "hasIndex").resolves(); + sinon.stub(this.client, "hasCollection").resolves(); + sinon.stub(this.client, "mCreate").resolves(); + sinon.stub(this.client, "mCreateOrReplace").resolves(); + sinon.stub(this.client, "mUpdate").resolves(); + sinon.stub(this.client, "mUpsert").resolves(); + sinon.stub(this.client, "mReplace").resolves(); + sinon.stub(this.client, "mDelete").resolves(); + sinon.stub(this.client, "deleteCollection").resolves(); + sinon.stub(this.client, "clearScroll").resolves(); + sinon.stub(this.client, "updateCollection").resolves(); + sinon.stub(this.client, "updateMapping").resolves(); + sinon.stub(this.client, "mExecute").resolves(); + sinon.stub(this.client, "upsert").resolves(); } } -module.exports = ElasticsearchMock; +module.exports = { Elasticsearch: ElasticsearchMock }; diff --git a/test/mocks/service/elasticsearchClient.mock.js b/test/mocks/service/elasticsearchClient.mock.js index 23dc5d13e8..1066ffce94 100644 --- a/test/mocks/service/elasticsearchClient.mock.js +++ b/test/mocks/service/elasticsearchClient.mock.js @@ -8,7 +8,7 @@ const sinon = require("sinon"); * @constructor */ class ElasticsearchClientMock { - constructor() { + constructor(version = "7.0.0") { this.bulk = sinon.stub().resolves(); this.count = sinon.stub().resolves(); this.create = sinon.stub().resolves(); @@ -18,7 +18,7 @@ class ElasticsearchClientMock { this.index = sinon.stub().resolves(); this.info = sinon.stub().resolves({ version: { - number: "8.0.0", + number: version, }, }); this.mget = sinon.stub().resolves(); @@ -64,6 +64,7 @@ class ElasticsearchClientMock { this.mcreateOrReplace = sinon.stub().resolves(); this.mdelete = sinon.stub().resolves(); this.clearScroll = sinon.stub().resolves(); + this._getRandomNumber = sinon.stub().returns(10000); } } diff --git a/test/model/storage/apiKey.test.js b/test/model/storage/apiKey.test.js index 9a6fc2d0bc..ffcc9b876c 100644 --- a/test/model/storage/apiKey.test.js +++ b/test/model/storage/apiKey.test.js @@ -25,7 +25,7 @@ describe("ApiKey", () => { 
); storageEngine = new StorageEngine(); - return storageEngine.init(); + storageEngine.init(); }); afterEach(() => { diff --git a/test/model/storage/baseModel.test.js b/test/model/storage/baseModel.test.js index c012a4d0c5..340c8837c4 100644 --- a/test/model/storage/baseModel.test.js +++ b/test/model/storage/baseModel.test.js @@ -42,7 +42,7 @@ describe("BaseModel", () => { ); storageEngine = new StorageEngine(); - return storageEngine.init(); + storageEngine.init(); }); describe("BaseModel.register", () => { diff --git a/test/service/storage/elasticsearch-7.test.js b/test/service/storage/elasticsearch-7.test.js new file mode 100644 index 0000000000..61f116dea1 --- /dev/null +++ b/test/service/storage/elasticsearch-7.test.js @@ -0,0 +1,6095 @@ +"use strict"; + +const should = require("should"); +const sinon = require("sinon"); +const ms = require("ms"); +const mockRequire = require("mock-require"); + +const { + BadRequestError, + MultipleErrorsError, + PreconditionError, + SizeLimitError, +} = require("../../../index"); +const KuzzleMock = require("../../mocks/kuzzle.mock"); +const ESClientMock = require("../../mocks/service/elasticsearchClient.mock"); + +const scopeEnum = require("../../../lib/core/storage/storeScopeEnum"); +const { Mutex } = require("../../../lib/util/mutex"); + +describe("Test: ElasticSearch service", () => { + let kuzzle; + let index; + let collection; + let alias; + let indice; + let elasticsearch; + let timestamp; + let esClientError; + let ES; + + before(() => { + ES = mockRequire.reRequire( + "../../../lib/service/storage/Elasticsearch", + ).Elasticsearch; + }); + + after(() => { + mockRequire.stopAll(); + }); + + beforeEach(async () => { + kuzzle = new KuzzleMock(); + kuzzle.config.services.storageEngine.majorVersion = 7; + + index = "nyc-open-data"; + collection = "yellow-taxi"; + alias = "@&nyc-open-data.yellow-taxi"; + indice = "&nyc-open-data.yellow-taxi"; + timestamp = Date.now(); + + esClientError = new Error("es client fail"); + + elasticsearch = new ES(kuzzle.config.services.storageEngine); + elasticsearch.client._client = new ESClientMock("7.0.0"); + + await elasticsearch.init(); + + elasticsearch.client._esWrapper = { + reject: sinon.spy((error) => Promise.reject(error)), + formatESError: sinon.spy((error) => error), + }; + + sinon.stub(elasticsearch.client, "waitForElasticsearch").resolves(); + sinon.stub(Date, "now").returns(timestamp); + + sinon.stub(Mutex.prototype, "lock").resolves(); + sinon.stub(Mutex.prototype, "unlock").resolves(); + }); + + afterEach(() => { + Date.now.restore(); + }); + + describe("#constructor", () => { + it("should initialize properties", () => { + const esInternal = new ES( + kuzzle.config.services.storageEngine, + scopeEnum.PRIVATE, + ); + + sinon.stub(esInternal.client, "waitForElasticsearch").resolves(); + esInternal.client._client = new ESClientMock("7.0.0"); + + should(elasticsearch.config).be.exactly( + kuzzle.config.services.storageEngine, + ); + should(elasticsearch.client._indexPrefix).be.eql("&"); + should(esInternal.client._indexPrefix).be.eql("%"); + }); + }); + + describe("#init", () => { + it("should initialize properly", () => { + const promise = elasticsearch.init(); + + return should(promise) + .be.fulfilledWith() + .then(() => { + should(elasticsearch.client._client).not.be.null(); + should(elasticsearch.client._esWrapper).not.be.null(); + should(elasticsearch.client.esVersion).not.be.null(); + }); + }); + }); + + describe("#stats", () => { + beforeEach(() => { + 
elasticsearch.client._client.indices.stats.resolves({ + body: { + indices: { + "%kuzzle.users": { + total: { docs: { count: 1 }, store: { size_in_bytes: 10 } }, + }, + "&test-index._kuzzle_keep": { + total: { docs: { count: 0 }, store: { size_in_bytes: 10 } }, + }, + "&test-index.test-collection": { + total: { docs: { count: 2 }, store: { size_in_bytes: 20 } }, + }, + ".kibana": { + total: { docs: { count: 2 }, store: { size_in_bytes: 42 } }, + }, + ".geoip_databases": { + /* This index nativement do not return anything on index:stats call */ + }, + }, + }, + }); + sinon + .stub(elasticsearch.client, "_getAliasFromIndice") + .callsFake((indiceArg) => [`@${indiceArg}`]); + }); + + afterEach(() => { + elasticsearch.client._getAliasFromIndice.restore(); + }); + + it("should only request required stats from underlying client", async () => { + const esRequest = { + metric: ["docs", "store"], + }; + + await elasticsearch.client.stats(); + + should(elasticsearch.client._client.indices.stats) + .calledOnce() + .calledWithMatch(esRequest); + }); + + it("should as default ignore private and hidden indices", async () => { + const result = await elasticsearch.client.stats(); + + should(result).be.match({ + size: 20, + indexes: [ + { + name: "test-index", + size: 20, + collections: [ + { + name: "test-collection", + documentCount: 2, + size: 20, + }, + ], + }, + ], + }); + }); + }); + + describe("#scroll", () => { + it("should be able to scroll an old search", async () => { + const cacheStub = kuzzle.ask.withArgs("core:cache:internal:get").resolves( + JSON.stringify({ + fetched: 1, + targets: [ + { + index: "foo", + collections: ["foo"], + }, + { + index: "bar", + collections: ["bar"], + }, + ], + }), + ); + + elasticsearch.client._client.scroll.resolves({ + body: { + _scroll_id: "azerty", + hits: { + hits: [ + { _index: "&foo.foo", _id: "foo", _source: {} }, + { _index: "&bar.bar", _id: "bar", _source: {} }, + ], + total: { value: 1000 }, + }, + }, + }); + + elasticsearch.client._getAliasFromIndice = sinon.stub(); + elasticsearch.client._getAliasFromIndice + .withArgs("&foo.foo") + .returns(["@&foo.foo"]); + elasticsearch.client._getAliasFromIndice + .withArgs("&bar.bar") + .returns(["@&bar.bar"]); + + const result = await elasticsearch.client.scroll("i-am-scroll-id", { + scrollTTL: "10s", + }); + + should(cacheStub).calledOnce(); + + const redisKey = cacheStub.firstCall.args[1]; + + // 3: + // the redis key stub returns "1" (1 result fetched so far) + + // the 2 results contained in the stubbed result of _client.scroll + // 10: scrollTTL of 10s + should(kuzzle.ask).calledWith( + "core:cache:internal:store", + redisKey, + JSON.stringify({ + fetched: 3, + targets: [ + { + index: "foo", + collections: ["foo"], + }, + { + index: "bar", + collections: ["bar"], + }, + ], + }), + { ttl: 10000 }, + ); + + should(elasticsearch.client._client.clearScroll).not.called(); + + should( + elasticsearch.client._client.scroll.firstCall.args[0], + ).be.deepEqual({ + scroll: "10s", + scroll_id: "i-am-scroll-id", + }); + + should(result).be.match({ + aggregations: undefined, + hits: [ + { + _id: "foo", + _source: {}, + index: "foo", + collection: "foo", + }, + { + _id: "bar", + _source: {}, + index: "bar", + collection: "bar", + }, + ], + remaining: 997, + scrollId: "azerty", + total: 1000, + }); + }); + + it("should clear a scroll upon fetching its last page of results", async () => { + const cacheStub = kuzzle.ask.withArgs("core:cache:internal:get").resolves( + JSON.stringify({ + fetched: 998, + targets: [ + { + index: 
"foo", + collections: ["foo"], + }, + { + index: "bar", + collections: ["bar"], + }, + ], + }), + ); + + elasticsearch.client._client.scroll.resolves({ + body: { + hits: { + hits: [ + { _index: "&foo.foo", _id: "foo", _source: {} }, + { _index: "&bar.bar", _id: "bar", _source: {} }, + ], + total: { value: 1000 }, + }, + _scroll_id: "azerty", + }, + }); + + elasticsearch.client._getAliasFromIndice = sinon.stub(); + elasticsearch.client._getAliasFromIndice + .withArgs("&foo.foo") + .returns(["@&foo.foo"]); + elasticsearch.client._getAliasFromIndice + .withArgs("&bar.bar") + .returns(["@&bar.bar"]); + + const result = await elasticsearch.client.scroll("i-am-scroll-id", { + scrollTTL: "10s", + }); + + should(cacheStub).be.calledOnce(); + + const redisKey = cacheStub.firstCall.args[1]; + + should(kuzzle.ask).not.calledWith("core:cache:internal:store"); + should(kuzzle.ask).calledWith("core:cache:internal:del", redisKey); + + should(elasticsearch.client._client.clearScroll) + .calledOnce() + .calledWithMatch({ scroll_id: "azerty" }); + + should( + elasticsearch.client._client.scroll.firstCall.args[0], + ).be.deepEqual({ + scroll: "10s", + scroll_id: "i-am-scroll-id", + }); + + should(result).be.match({ + aggregations: undefined, + hits: [ + { + _id: "foo", + _source: {}, + index: "foo", + collection: "foo", + }, + { + _id: "bar", + _source: {}, + index: "bar", + collection: "bar", + }, + ], + remaining: 0, + scrollId: "azerty", + total: 1000, + }); + }); + + it("should reject promise if a scroll fails", async () => { + elasticsearch.client._client.scroll.rejects(esClientError); + + kuzzle.ask.withArgs("core:cache:internal:get").resolves("1"); + + await should(elasticsearch.client.scroll("i-am-scroll-id")).be.rejected(); + + should(elasticsearch.client._esWrapper.formatESError).calledWith( + esClientError, + ); + }); + + it("should reject if the scrollId does not exists in Kuzzle cache", async () => { + kuzzle.ask.withArgs("core:cache:internal:get").resolves(null); + + await should( + elasticsearch.client.scroll("i-am-scroll-id"), + ).be.rejectedWith({ + id: "services.storage.unknown_scroll_id", + }); + + should(elasticsearch.client._client.scroll).not.be.called(); + }); + + it("should reject if the scroll duration is too great", async () => { + elasticsearch._config.maxScrollDuration = "21m"; + + await should( + elasticsearch.client.scroll("i-am-scroll-id", { scrollTTL: "42m" }), + ).be.rejectedWith({ id: "services.storage.scroll_duration_too_great" }); + + should(elasticsearch.client._client.scroll).not.be.called(); + }); + + it("should default an explicitly null scrollTTL argument", async () => { + const cacheStub = kuzzle.ask + .withArgs("core:cache:internal:get", sinon.match.string) + .resolves( + JSON.stringify({ + fetched: 1, + index, + collection, + }), + ); + + elasticsearch.client._client.scroll.resolves({ + body: { + hits: { hits: [], total: { value: 1000 } }, + _scroll_id: "azerty", + }, + }); + + await elasticsearch.client.scroll("scroll-id", { scrollTTL: null }); + + should(cacheStub).calledOnce(); + should(kuzzle.ask).calledWith( + "core:cache:internal:store", + sinon.match.string, + JSON.stringify({ + fetched: 1, + index, + collection, + }), + sinon.match.object, + ); + + should( + elasticsearch.client._client.scroll.firstCall.args[0], + ).be.deepEqual({ + scroll: elasticsearch.config.defaults.scrollTTL, + scroll_id: "scroll-id", + }); + }); + }); + + describe("#search", () => { + let searchBody; + + beforeEach(() => { + searchBody = {}; + }); + + it("should join multi indexes and 
collections when specified with targets", async () => { + elasticsearch.client._client.search.rejects(new Error()); // Skip rest of the execution + + try { + await elasticsearch.client.search({ + targets: [ + { + index: "nyc-open-data", + collections: ["yellow-taxi", "red-taxi"], + }, + { + index: "nyc-close-data", + collections: ["green-taxi", "blue-taxi"], + }, + ], + searchBody, + }); + } catch (error) { + // Catch error since we throw to skip the rest of the execution + } finally { + should(elasticsearch.client._client.search.firstCall.args[0]).match({ + index: + "@&nyc-open-data.yellow-taxi,@&nyc-open-data.red-taxi,@&nyc-close-data.green-taxi,@&nyc-close-data.blue-taxi", + body: { query: { match_all: {} } }, + from: undefined, + size: undefined, + scroll: undefined, + trackTotalHits: true, + }); + } + }); + + it("should be able to search documents", async () => { + elasticsearch.client._client.search.resolves({ + body: { + aggregations: { some: "aggregs" }, + body: searchBody, + hits: { + hits: [ + { + _id: "liia", + _index: indice, + _source: { country: "Nepal" }, + _score: 42, + highlight: "highlight", + inner_hits: { + inner_name: { + hits: { + hits: [ + { + _id: "nestedLiia", + _source: { city: "Kathmandu" }, + }, + ], + }, + }, + }, + other: "thing", + }, + ], + total: { value: 1 }, + }, + suggest: { some: "suggest" }, + _scroll_id: "i-am-scroll-id", + }, + }); + + elasticsearch.client._getAliasFromIndice = sinon.stub(); + elasticsearch.client._getAliasFromIndice + .withArgs(indice) + .returns([alias]); + + const result = await elasticsearch.client.search({ + index, + collection, + searchBody, + }); + + should(elasticsearch.client._client.search.firstCall.args[0]).match({ + index: alias, + body: { query: { match_all: {} } }, + from: undefined, + size: undefined, + scroll: undefined, + trackTotalHits: true, + }); + + should(kuzzle.ask).calledWith( + "core:cache:internal:store", + sinon.match.string, + JSON.stringify({ + collection, + fetched: 1, + index, + }), + { ttl: ms(elasticsearch.config.defaults.scrollTTL) }, + ); + + should(result).match({ + aggregations: { some: "aggregs" }, + hits: [ + { + index, + collection, + _id: "liia", + _source: { country: "Nepal" }, + _score: 42, + highlight: "highlight", + inner_hits: { + inner_name: [ + { + _id: "nestedLiia", + _source: { city: "Kathmandu" }, + }, + ], + }, + }, + ], + remaining: 0, + suggest: { some: "suggest" }, + scrollId: "i-am-scroll-id", + total: 1, + }); + }); + + it("should be able to search with from/size and scroll arguments", async () => { + elasticsearch.client._client.search.resolves({ + body: { + hits: { hits: [], total: { value: 0 } }, + _scroll_id: "i-am-scroll-id", + }, + }); + + await elasticsearch.client.search( + { index, collection, searchBody }, + { from: 0, scroll: "30s", size: 1 }, + ); + + should(elasticsearch.client._client.search.firstCall.args[0]).match({ + body: searchBody, + from: 0, + index: alias, + scroll: "30s", + size: 1, + trackTotalHits: true, + }); + + should(kuzzle.ask).calledWith( + "core:cache:internal:store", + sinon.match.string, + JSON.stringify({ + collection, + fetched: 0, + index, + }), + { ttl: 30000 }, + ); + }); + + it("should be able to search on ES alias with invalid collection name", async () => { + elasticsearch.client._client.search.resolves({ + body: { + hits: { hits: [], total: { value: 0 } }, + }, + }); + + await elasticsearch.client.search({ + index: "main", + collection: "kuzzleData", + searchBody, + }); + + 
should(elasticsearch.client._client.search.firstCall.args[0]).match({ + body: searchBody, + index: "@&main.kuzzleData", + trackTotalHits: true, + }); + }); + + it("should return a rejected promise if a search fails", async () => { + elasticsearch.client._client.search.rejects(esClientError); + + await should( + elasticsearch.client.search({ index, collection, searchBody }), + ).be.rejected(); + + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + + it("should return a rejected promise if an unhautorized property is in the query", () => { + searchBody = { + not_authorized: 42, + query: {}, + }; + + return should( + elasticsearch.client.search({ index, collection, searchBody }), + ).be.rejectedWith({ id: "services.storage.invalid_search_query" }); + }); + + it("should not save the scrollId in the cache if not present in response", async () => { + elasticsearch.client._client.search.resolves({ + body: { + hits: { hits: [], total: { value: 0 } }, + }, + }); + + await elasticsearch.client.search({ index, collection, searchBody: {} }); + + should(kuzzle.ask).not.calledWith("core:cache:internal:store"); + }); + + it("should return a rejected promise if the scroll duration is too great", async () => { + elasticsearch._config.maxScrollDuration = "21m"; + + const promise = elasticsearch.client.search( + { index, collection, searchBody }, + { scroll: "42m" }, + ); + + await should(promise).be.rejectedWith({ + id: "services.storage.scroll_duration_too_great", + }); + + should(elasticsearch.client._client.search).not.be.called(); + }); + }); + + describe("#get", () => { + it("should allow getting a single document", () => { + elasticsearch.client._client.get.resolves({ + body: { + _id: "liia", + _source: { city: "Kathmandu" }, + _version: 1, + }, + }); + + const promise = elasticsearch.client.get(index, collection, "liia"); + + return promise.then((result) => { + should(elasticsearch.client._client.get).be.calledWithMatch({ + index: alias, + id: "liia", + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + }); + }); + }); + + it("should reject requests when the user search for a document with id _search", () => { + const promise = elasticsearch.client.get(index, collection, "_search"); + + return should(promise).be.rejectedWith({ + id: "services.storage.search_as_an_id", + }); + }); + + it("should return a rejected promise if a get fails", () => { + elasticsearch.client._client.get.rejects(esClientError); + + const promise = elasticsearch.client.get(index, collection, "liia"); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#mGet", () => { + it("should allow getting multiples documents", () => { + elasticsearch.client._client.mget.resolves({ + body: { + docs: [ + { + _id: "liia", + found: true, + _source: { city: "Kathmandu" }, + _version: 1, + }, + { _id: "mhery", found: false }, + ], + }, + }); + + const promise = elasticsearch.client.mGet(index, collection, [ + "liia", + "mhery", + ]); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).be.calledWithMatch({ + body: { + docs: [ + { _id: "liia", _index: alias }, + { _id: "mhery", _index: alias }, + ], + }, + }); + + should(result).match({ + items: [{ _id: "liia", _source: { city: "Kathmandu" }, _version: 1 }], + errors: ["mhery"], + }); + }); + }); + + it("should return a rejected promise if 
client.mget fails", () => { + elasticsearch.client._client.mget.rejects(esClientError); + + const promise = elasticsearch.client.mGet(index, collection, ["liia"]); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#mExists", () => { + it("should allow getting multiples existing documents", () => { + elasticsearch.client._client.mget.resolves({ + body: { + docs: [ + { _id: "foo", found: true }, + { _id: "bar", found: false }, + ], + }, + }); + + const promise = elasticsearch.client.mExists(index, collection, [ + "foo", + "bar", + ]); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).be.calledWithMatch({ + body: { + docs: [{ _id: "foo" }, { _id: "bar" }], + }, + index: alias, + }); + + should(result).match({ + items: ["foo"], + errors: ["bar"], + }); + }); + }); + + it("should return a rejected promise if client.mget fails", () => { + elasticsearch.client._client.mget.rejects(esClientError); + + const promise = elasticsearch.client.mExists(index, collection, ["foo"]); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#count", () => { + it("should allow counting documents using a provided filter", () => { + const filter = { + query: { + match_all: {}, + }, + }; + elasticsearch.client._client.count.resolves({ + body: { + count: 42, + }, + }); + + const promise = elasticsearch.client.count(index, collection, filter); + + return promise.then((result) => { + should(elasticsearch.client._client.count).be.calledWithMatch({ + index: alias, + body: filter, + }); + + should(result).be.eql(42); + }); + }); + + it("should return a rejected promise if count fails", () => { + elasticsearch.client._client.count.rejects(esClientError); + + const promise = elasticsearch.client.count(index, collection); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#create", () => { + it("should allow creating document an ID is provided", () => { + elasticsearch.client._client.index.resolves({ + body: { + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + }, + }); + + const promise = elasticsearch.client.create( + index, + collection, + { city: "Kathmandu" }, + { id: "liia", refresh: "wait_for", userId: "aschen" }, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + body: { + city: "Kathmandu", + _kuzzle_info: { + author: "aschen", + createdAt: timestamp, + }, + }, + id: "liia", + refresh: "wait_for", + op_type: "create", + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + }); + }); + }); + + it("should create a document when no ID is provided", () => { + elasticsearch.client._client.index.resolves({ + body: { + _id: "mehry", + _version: 1, + _source: { city: "Panipokari" }, + }, + }); + + const promise = elasticsearch.client.create(index, collection, { + city: "Panipokari", + }); + + return promise.then((result) => { + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + body: { + city: "Panipokari", + _kuzzle_info: { + author: null, + }, + }, + op_type: "index", + }); + + should(result).match({ + _id: "mehry", + _version: 1, + 
_source: { city: "Panipokari" }, + }); + }); + }); + }); + + describe("#createOrReplace", () => { + beforeEach(() => { + elasticsearch.client._client.index.resolves({ + body: { + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + result: "created", + }, + }); + }); + + it("should support createOrReplace capability", () => { + const promise = elasticsearch.client.createOrReplace( + index, + collection, + "liia", + { city: "Kathmandu" }, + { refresh: "wait_for", userId: "aschen" }, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + body: { + city: "Kathmandu", + _kuzzle_info: { + author: "aschen", + createdAt: timestamp, + updatedAt: timestamp, + updater: "aschen", + }, + }, + id: "liia", + refresh: "wait_for", + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + created: true, + }); + }); + }); + + it("should not inject meta if specified", () => { + const promise = elasticsearch.client.createOrReplace( + index, + collection, + "liia", + { city: "Kathmandu" }, + { injectKuzzleMeta: false }, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + body: { + city: "Kathmandu", + _kuzzle_info: undefined, + }, + id: "liia", + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + created: true, + }); + }); + }); + + it("should return a rejected promise if client.index fails", () => { + elasticsearch.client._client.index.rejects(esClientError); + + const promise = elasticsearch.client.createOrReplace( + index, + collection, + "liia", + { + city: "Kathmandu", + }, + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#update", () => { + beforeEach(() => { + elasticsearch.client._client.update.resolves({ + body: { + _id: "liia", + _version: 1, + get: { + _source: { city: "Panipokari" }, + }, + }, + }); + }); + + it("should allow to update a document", () => { + const promise = elasticsearch.client.update(index, collection, "liia", { + city: "Panipokari", + }); + + return promise.then((result) => { + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, + }, + }, + }, + id: "liia", + refresh: undefined, + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { + city: "Panipokari", + }, + }); + }); + }); + + it("should handle optional configurations", () => { + const promise = elasticsearch.client.update( + index, + collection, + "liia", + { city: "Panipokari" }, + { refresh: "wait_for", userId: "aschen", retryOnConflict: 42 }, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "aschen", + }, + }, + }, + id: "liia", + refresh: "wait_for", + _source: "true", + retry_on_conflict: 42, + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { + city: "Panipokari", + }, + }); + }); + }); + + it("should return a rejected promise if client.update fails", () => { + elasticsearch.client._client.update.rejects(esClientError); + + 
const promise = elasticsearch.client.update(index, collection, "liia", { + city: "Kathmandu", + }); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + it("should default an explicitly null retryOnConflict", async () => { + await elasticsearch.client.update( + index, + collection, + "liia", + { city: "Panipokari" }, + { refresh: "wait_for", userId: "oh noes", retryOnConflict: null }, + ); + + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "oh noes", + }, + }, + }, + id: "liia", + refresh: "wait_for", + _source: "true", + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }); + }); + }); + + describe("#upsert", () => { + beforeEach(() => { + elasticsearch.client._client.update.resolves({ + body: { + _id: "liia", + _version: 2, + result: "updated", + get: { + _source: { city: "Panipokari" }, + }, + }, + }); + }); + + it("should allow to upsert a document", async () => { + const result = await elasticsearch.client.upsert( + index, + collection, + "liia", + { + city: "Panipokari", + }, + ); + + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, + }, + }, + upsert: { + _kuzzle_info: { + author: null, + createdAt: timestamp, + }, + }, + }, + id: "liia", + refresh: undefined, + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }); + + should(result).match({ + _id: "liia", + _version: 2, + _source: { + city: "Panipokari", + }, + created: false, + }); + }); + + it("should handle default values for upserted documents", async () => { + const result = await elasticsearch.client.upsert( + index, + collection, + "liia", + { city: "Panipokari" }, + { + defaultValues: { oh: "noes" }, + }, + ); + + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, + }, + }, + upsert: { + oh: "noes", + _kuzzle_info: { + author: null, + createdAt: timestamp, + }, + }, + }, + id: "liia", + refresh: undefined, + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }); + + should(result).match({ + _id: "liia", + _version: 2, + _source: { + city: "Panipokari", + }, + created: false, + }); + }); + + it('should return the right "_created" result on a document creation', async () => { + elasticsearch.client._client.update.resolves({ + body: { + _id: "liia", + _version: 1, + result: "created", + get: { + _source: { city: "Panipokari" }, + }, + }, + }); + + const result = await elasticsearch.client.upsert( + index, + collection, + "liia", + { city: "Panipokari" }, + { + defaultValues: { oh: "noes" }, + }, + ); + + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, + }, + }, + upsert: { + oh: "noes", + _kuzzle_info: { + author: null, + createdAt: timestamp, + }, + }, + }, + id: "liia", + refresh: undefined, + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { + city: "Panipokari", + }, + created: true, + }); + }); + + it("should 
handle optional configurations", async () => { + const result = await elasticsearch.client.upsert( + index, + collection, + "liia", + { city: "Panipokari" }, + { refresh: "wait_for", userId: "aschen", retryOnConflict: 42 }, + ); + + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "aschen", + }, + }, + upsert: { + _kuzzle_info: { + author: "aschen", + createdAt: timestamp, + }, + }, + }, + id: "liia", + refresh: "wait_for", + _source: "true", + retry_on_conflict: 42, + }); + + should(result).match({ + _id: "liia", + _version: 2, + _source: { + city: "Panipokari", + }, + created: false, + }); + }); + + it("should return a rejected promise if client.upsert fails", async () => { + elasticsearch.client._client.update.rejects(esClientError); + + await should( + elasticsearch.client.upsert(index, collection, "liia", { + city: "Kathmandu", + }), + ).rejected(); + + should(elasticsearch.client._esWrapper.formatESError).calledWith( + esClientError, + ); + }); + + it("should default an explicitly null retryOnConflict", async () => { + await elasticsearch.client.upsert( + index, + collection, + "liia", + { city: "Panipokari" }, + { refresh: "wait_for", userId: "oh noes", retryOnConflict: null }, + ); + + should(elasticsearch.client._client.update).be.calledWithMatch({ + index: alias, + body: { + doc: { + city: "Panipokari", + _kuzzle_info: { + updatedAt: timestamp, + updater: "oh noes", + }, + }, + upsert: { + _kuzzle_info: { + author: "oh noes", + createdAt: timestamp, + }, + }, + }, + id: "liia", + refresh: "wait_for", + _source: "true", + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }); + }); + }); + + describe("#replace", () => { + beforeEach(() => { + elasticsearch.client._client.index.resolves({ + body: { + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + }, + }); + elasticsearch.client._client.exists.resolves({ body: true }); + }); + + it("should support replace capability", () => { + const promise = elasticsearch.client.replace(index, collection, "liia", { + city: "Kathmandu", + }); + + return promise.then((result) => { + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + id: "liia", + body: { + city: "Kathmandu", + _kuzzle_info: { + author: null, + createdAt: timestamp, + updatedAt: timestamp, + updater: null, + }, + }, + refresh: undefined, + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + }); + }); + }); + + it("should accept additional options", () => { + const promise = elasticsearch.client.replace( + index, + collection, + "liia", + { city: "Kathmandu" }, + { refresh: "wait_for", userId: "aschen" }, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + id: "liia", + body: { + city: "Kathmandu", + _kuzzle_info: { + author: "aschen", + createdAt: timestamp, + updatedAt: timestamp, + updater: "aschen", + }, + }, + refresh: "wait_for", + }); + + should(result).match({ + _id: "liia", + _version: 1, + _source: { city: "Kathmandu" }, + }); + }); + }); + + it("should throw a NotFoundError Exception if document already exists", () => { + elasticsearch.client._client.exists.resolves({ body: false }); + + const promise = elasticsearch.client.replace(index, collection, "liia", { + city: "Kathmandu", + }); + + return should(promise) + .be.rejected() + .then(() => { + should( + 
elasticsearch.client._esWrapper.formatESError, + ).be.calledWithMatch({ + id: "services.storage.not_found", + }); + should(elasticsearch.client._client.index).not.be.called(); + }); + }); + + it("should return a rejected promise if client.index fails", () => { + elasticsearch.client._client.index.rejects(esClientError); + + const promise = elasticsearch.client.replace(index, collection, "liia", { + city: "Kathmandu", + }); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#delete", () => { + beforeEach(() => { + elasticsearch.client._client.delete.resolves({ + body: { + _id: "liia", + }, + }); + }); + + it("should allow to delete a document", () => { + const promise = elasticsearch.client.delete(index, collection, "liia"); + + return promise.then((result) => { + should(elasticsearch.client._client.delete).be.calledWithMatch({ + index: alias, + id: "liia", + refresh: undefined, + }); + + should(result).be.null(); + }); + }); + + it("should allow additional options", () => { + const promise = elasticsearch.client.delete(index, collection, "liia", { + refresh: "wait_for", + }); + + return promise.then((result) => { + should(elasticsearch.client._client.delete).be.calledWithMatch({ + index: alias, + id: "liia", + refresh: "wait_for", + }); + + should(result).be.null(); + }); + }); + + it("should return a rejected promise if client.delete fails", () => { + elasticsearch.client._client.delete.rejects(esClientError); + + const promise = elasticsearch.client.delete(index, collection, "liia"); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#updateByQuery", () => { + beforeEach(() => { + sinon.stub(elasticsearch.client, "_getAllDocumentsFromQuery").resolves([ + { _id: "_id1", _source: { name: "Ok" } }, + { _id: "_id2", _source: { name: "Ok" } }, + ]); + + sinon.stub(elasticsearch.client, "mUpdate").resolves({ + items: [ + { + _id: "_id1", + _source: { name: "bar" }, + status: 200, + }, + { + _id: "_id2", + _source: { name: "bar" }, + status: 200, + }, + ], + errors: [], + }); + + elasticsearch.client._client.indices.refresh.resolves({ + body: { _shards: 1 }, + }); + }); + + const documents = [ + { + _id: "_id1", + _source: undefined, + body: { + name: "bar", + }, + }, + { + _id: "_id2", + _source: undefined, + body: { + name: "bar", + }, + }, + ]; + + it("should have updateByQuery capability", () => { + const promise = elasticsearch.client.updateByQuery( + index, + collection, + { filter: { term: { name: "Ok" } } }, + { name: "bar" }, + ); + + return promise.then((result) => { + should(elasticsearch.client.mUpdate).be.calledWithMatch( + index, + collection, + documents, + { refresh: undefined }, + ); + + should(result).match({ + successes: [ + { + _id: "_id1", + _source: { name: "bar" }, + status: 200, + }, + { + _id: "_id2", + _source: { name: "bar" }, + status: 200, + }, + ], + errors: [], + }); + }); + }); + + it("should allow additional options", async () => { + const result = await elasticsearch.client.updateByQuery( + index, + collection, + { filter: "term" }, + { name: "bar" }, + { refresh: "wait_for", size: 3, userId: "aschen" }, + ); + + should(elasticsearch.client._getAllDocumentsFromQuery).be.calledWithMatch( + { + index: alias, + body: { query: { filter: "term" } }, + scroll: "5s", + size: 3, + }, + ); + + 
should(elasticsearch.client.mUpdate).be.calledWithMatch( + index, + collection, + documents, + { + refresh: "wait_for", + userId: "aschen", + }, + ); + + should(result).match({ + successes: [ + { _id: "_id1", _source: { name: "bar" }, status: 200 }, + { _id: "_id2", _source: { name: "bar" }, status: 200 }, + ], + errors: [], + }); + }); + + it("should reject if the number of impacted documents exceeds the configured limit", () => { + elasticsearch.client._getAllDocumentsFromQuery.restore(); + + elasticsearch.client._client.search.resolves({ + body: { + hits: { + hits: [], + total: { + value: 99999, + }, + }, + _scroll_id: "foobar", + }, + }); + + kuzzle.config.limits.documentsFetchCount = 2; + + return should( + elasticsearch.client.updateByQuery(index, collection, {}, {}), + ).rejectedWith(SizeLimitError, { + id: "services.storage.write_limit_exceeded", + }); + }); + }); + + describe("#bulkUpdateByQuery", () => { + let query; + let changes; + let request; + + beforeEach(() => { + query = { + match: { foo: "bar" }, + }; + changes = { + bar: "foo", + }; + + request = { + body: { + query, + script: { + params: { bar: "foo" }, + source: "ctx._source.bar = params['bar'];", + }, + }, + index: alias, + refresh: false, + }; + + elasticsearch.client._client.updateByQuery.resolves({ + body: { + total: 42, + updated: 42, + failures: [], + }, + }); + }); + + it("should have updateByQuery capabilities", async () => { + const result = await elasticsearch.client.bulkUpdateByQuery( + index, + collection, + query, + changes, + ); + + should(elasticsearch.client._client.updateByQuery).be.calledWithMatch( + request, + ); + + should(result).match({ + updated: 42, + }); + }); + + it("should allow additonnal option", async () => { + request.refresh = "wait_for"; + + await elasticsearch.client.bulkUpdateByQuery( + index, + collection, + query, + changes, + { + refresh: "wait_for", + }, + ); + + should(elasticsearch.client._client.updateByQuery).be.calledWithMatch( + request, + ); + }); + + it("should reject if client.updateByQuery fails", () => { + elasticsearch.client._client.updateByQuery.rejects(esClientError); + + const promise = elasticsearch.client.bulkUpdateByQuery( + index, + collection, + query, + changes, + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + it("should reject if some failures occur", () => { + elasticsearch.client._client.updateByQuery.resolves({ + body: { + total: 3, + updated: 2, + failures: [{ shardId: 42, reason: "error", foo: "bar" }], + }, + }); + + const promise = elasticsearch.client.bulkUpdateByQuery( + index, + collection, + query, + changes, + ); + + return should(promise).be.rejectedWith(MultipleErrorsError, { + id: "services.storage.incomplete_update", + message: + "2 documents were successfully updated before an error occured", + }); + }); + }); + + describe("#deleteByQuery", () => { + beforeEach(() => { + sinon.stub(elasticsearch.client, "_getAllDocumentsFromQuery").resolves([ + { _id: "_id1", _source: "_source1" }, + { _id: "_id2", _source: "_source2" }, + ]); + + elasticsearch.client._client.deleteByQuery.resolves({ + body: { + total: 2, + deleted: 1, + failures: [{ shardId: 42, reason: "error", foo: "bar" }], + }, + }); + }); + + it("should have deleteByQuery capability", async () => { + const result = await elasticsearch.client.deleteByQuery( + index, + collection, + { + filter: "term", + }, + ); + + 
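// Editor's note: unlike updateByQuery above, deleteByQuery is expected to go through
// Elasticsearch's native delete-by-query API, with an optional client-side pre-fetch of the
// impacted documents (skipped when fetch=false, see below) so they can be returned to the
// caller. A rough sketch of that flow, assuming the unwrapped response shape of the v8
// client (illustrative only, not the actual implementation):
const sketchDeleteByQuery = async (query, { fetch = true, ...options } = {}) => {
  const esRequest = { index: alias, body: { query }, scroll: "5s", size: 1000, ...options };
  const documents = fetch
    ? await elasticsearch.client._getAllDocumentsFromQuery(esRequest)
    : [];
  const { total, deleted, failures } =
    await elasticsearch.client._client.deleteByQuery(esRequest);
  return { documents, total, deleted, failures };
};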
should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ + index: alias, + body: { query: { filter: "term" } }, + scroll: "5s", + from: undefined, + size: 1000, + refresh: undefined, + }); + + should(elasticsearch.client._getAllDocumentsFromQuery).be.calledWithMatch( + { + index: alias, + body: { query: { filter: "term" } }, + scroll: "5s", + from: undefined, + size: 1000, + refresh: undefined, + }, + ); + + should(result).match({ + documents: [ + { _id: "_id1", _source: "_source1" }, + { _id: "_id2", _source: "_source2" }, + ], + total: 2, + deleted: 1, + failures: [{ shardId: 42, reason: "error" }], + }); + }); + + it("should allow additional options", async () => { + const result = await elasticsearch.client.deleteByQuery( + index, + collection, + { filter: "term" }, + { refresh: "wait_for", from: 1, size: 3 }, + ); + + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ + index: alias, + body: { query: { filter: "term" } }, + size: 3, + refresh: true, + }); + + should(result).match({ + total: 2, + deleted: 1, + failures: [{ shardId: 42, reason: "error" }], + }); + }); + + it("should not fetch documents if fetch=false", async () => { + const result = await elasticsearch.client.deleteByQuery( + index, + collection, + { filter: "term" }, + { fetch: false }, + ); + + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ + index: alias, + body: { query: { filter: "term" } }, + scroll: "5s", + from: undefined, + size: 1000, + refresh: undefined, + }); + + should(elasticsearch.client._getAllDocumentsFromQuery).not.be.called(); + + should(result).match({ + documents: [], + total: 2, + deleted: 1, + failures: [{ shardId: 42, reason: "error" }], + }); + }); + + it("should rejects if client.deleteByQuery fails", () => { + elasticsearch.client._client.deleteByQuery.rejects(esClientError); + + const promise = elasticsearch.client.deleteByQuery(index, collection, { + filter: "term", + }); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + it("should reject if the query is empty", () => { + const promise = elasticsearch.client.deleteByQuery( + index, + collection, + "not an object", + ); + + return should(promise).be.rejectedWith({ + id: "services.storage.missing_argument", + }); + }); + + it("should reject if the number of impacted documents exceeds the configured limit", () => { + elasticsearch.client._getAllDocumentsFromQuery.restore(); + + elasticsearch.client._client.search.resolves({ + body: { + hits: { + hits: [], + total: { + value: 99999, + }, + }, + _scroll_id: "foobar", + }, + }); + + kuzzle.config.limits.documentsFetchCount = 2; + + return should( + elasticsearch.client.deleteByQuery(index, collection, {}), + ).rejectedWith(SizeLimitError, { + id: "services.storage.write_limit_exceeded", + }); + }); + }); + + describe("#deleteFields", () => { + beforeEach(() => { + elasticsearch.client._client.get.resolves({ + body: { + _id: "liia", + _version: 1, + _source: { city: "Kathmandu", useless: "somevalue" }, + }, + }); + + elasticsearch.client._client.index.resolves({ + body: { + _id: "liia", + _version: 2, + _source: { city: "Kathmandu" }, + }, + }); + }); + + it("should support field removal capability", () => { + const promise = elasticsearch.client.deleteFields( + index, + collection, + "liia", + ["useless"], + ); + + return promise.then((result) => { + should(elasticsearch.client._client.get).be.calledWithMatch({ + index: 
alias, + id: "liia", + }); + + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + id: "liia", + body: { + city: "Kathmandu", + _kuzzle_info: { + updatedAt: timestamp, + updater: null, + }, + }, + refresh: undefined, + }); + + should(result).match({ + _id: "liia", + _version: 2, + _source: { city: "Kathmandu" }, + }); + }); + }); + + it("should accept additional options", () => { + const promise = elasticsearch.client.deleteFields( + index, + collection, + "liia", + ["useless"], + { refresh: "wait_for", userId: "aschen" }, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.get).be.calledWithMatch({ + index: alias, + id: "liia", + }); + + should(elasticsearch.client._client.index).be.calledWithMatch({ + index: alias, + id: "liia", + body: { + city: "Kathmandu", + _kuzzle_info: { + updatedAt: timestamp, + updater: "aschen", + }, + }, + refresh: "wait_for", + }); + + should(result).match({ + _id: "liia", + _version: 2, + _source: { city: "Kathmandu" }, + }); + }); + }); + + it("should throw a NotFoundError Exception if document does not exists", () => { + elasticsearch.client._client.get.rejects(esClientError); + + const promise = elasticsearch.client.deleteFields( + index, + collection, + "liia", + ["useless"], + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + should(elasticsearch.client._client.index).not.be.called(); + }); + }); + + it("should return a rejected promise if client.index fails", () => { + elasticsearch.client._client.index.rejects(esClientError); + + const promise = elasticsearch.client.deleteFields( + index, + collection, + "liia", + ["useless"], + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#mExecute", () => { + it("should call the callback method with each batch returned by ES", async () => { + const hits1 = { + hits: [21, 42, 84], + total: { + value: 5, + }, + }; + const hits2 = { + hits: [168, 336], + total: { + value: 5, + }, + }; + const callbackStub = sinon + .stub() + .onCall(0) + .resolves(1) + .onCall(1) + .resolves(2); + + elasticsearch.client._client.search.callsArgWith(1, null, { + body: { hits: hits1 }, + _scroll_id: "scroll-id", + }); + + elasticsearch.client._client.scroll.callsArgWith(1, null, { + body: { hits: hits2 }, + _scroll_id: "scroll-id", + }); + + const result = await elasticsearch.client.mExecute( + index, + collection, + { match: 21 }, + callbackStub, + ); + + should(result).match([1, 2]); + + should(elasticsearch.client._client.search.getCall(0).args[0]).match({ + index: alias, + body: { query: { match: 21 } }, + scroll: "5s", + from: 0, + size: 10, + }); + + should(callbackStub).be.calledTwice(); + should(callbackStub.getCall(0).args[0]).be.eql(hits1.hits); + should(callbackStub.getCall(1).args[0]).be.eql(hits2.hits); + }); + + it("should reject if the query is empty", () => { + const promise = elasticsearch.client.mExecute( + index, + collection, + "not an object", + () => {}, + ); + + return should(promise).be.rejectedWith({ + id: "services.storage.missing_argument", + }); + }); + }); + + describe("#createIndex", () => { + beforeEach(() => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [{ alias: alias }, { alias: "@%nepali.liia" }], + }); + sinon.stub(elasticsearch.client, "_createHiddenCollection").resolves(); + }); + 
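// Editor's note: the fixtures in this block rely on Kuzzle's storage naming scheme, where a
// collection is reached through an alias of the form "@" + scope + index + "." + collection:
// "@&" aliases belong to user-facing indexes, while "@%" aliases are treated as unauthorized
// (internal) and filtered out of listings. createIndex is expected to list existing aliases
// through cat.aliases, reject with index_already_exists when the index is already referenced,
// and otherwise materialize the new index by creating the hidden "_kuzzle_keep" collection.
// A minimal naming sketch, assuming that layout (illustrative only):
const sketchAlias = (targetIndex, targetCollection, scope = "&") =>
  `@${scope}${targetIndex}.${targetCollection}`;
// sketchAlias("nepali", "liia")      === "@&nepali.liia"
// sketchAlias("nepali", "liia", "%") === "@%nepali.liia"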
+ afterEach(() => { + elasticsearch.client._createHiddenCollection.restore(); + }); + + it("should resolve and create a hidden collection if the index does not exist", async () => { + await elasticsearch.client.createIndex("lfiduras"); + + should(elasticsearch.client._createHiddenCollection).be.calledWithMatch( + "lfiduras", + ); + }); + + it("should reject if the index already exists", () => { + return should(elasticsearch.client.createIndex("nepali")).be.rejectedWith( + PreconditionError, + { id: "services.storage.index_already_exists" }, + ); + }); + + it("should return a rejected promise if client.cat.indices fails", () => { + elasticsearch.client._client.cat.aliases.rejects(esClientError); + + const promise = elasticsearch.client.createIndex(index, collection, { + filter: "term", + }); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + it("should reject if the index name is invalid", () => { + sinon.stub(elasticsearch.client, "isIndexNameValid").returns(false); + + return should(elasticsearch.client.createIndex("foobar")).rejectedWith( + BadRequestError, + { id: "services.storage.invalid_index_name" }, + ); + }); + }); + + describe("#createCollection", () => { + let _checkMappings; + + beforeEach(() => { + _checkMappings = elasticsearch.client._checkMappings; + + elasticsearch.client._client.indices.create.resolves({}); + elasticsearch.client.hasCollection = sinon.stub().resolves(false); + elasticsearch.client._checkMappings = sinon.stub().resolves(); + + sinon.stub(elasticsearch.client, "_createHiddenCollection").resolves(); + sinon.stub(elasticsearch.client, "_hasHiddenCollection").resolves(false); + sinon.stub(elasticsearch.client, "deleteCollection").resolves(); + sinon.stub(elasticsearch.client, "_getAvailableIndice").resolves(indice); + sinon.stub(elasticsearch.client, "_getWaitForActiveShards").returns("1"); + }); + + afterEach(() => { + elasticsearch.client._getAvailableIndice.restore(); + }); + + it("should allow creating a new collection and inject commonMappings", async () => { + const settings = { index: { blocks: { write: true } } }; + const mappings = { properties: { city: { type: "keyword" } } }; + + const result = await elasticsearch.client.createCollection( + index, + collection, + { + mappings, + settings, + }, + ); + + should(elasticsearch.client.hasCollection).be.calledWith( + index, + collection, + ); + should(elasticsearch.client._checkMappings).be.calledWithMatch({ + properties: mappings.properties, + }); + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ + index: indice, + body: { + aliases: { [alias]: {} }, + mappings: { + dynamic: elasticsearch.config.commonMapping.dynamic, + _meta: elasticsearch.config.commonMapping._meta, + properties: mappings.properties, + }, + settings: { index: { blocks: { write: true } } }, + }, + }); + + should(result).be.null(); + should(elasticsearch.client.deleteCollection).not.be.called(); + }); + + it("should delete the hidden collection if present", async () => { + elasticsearch.client._hasHiddenCollection.resolves(true); + + await elasticsearch.client.createCollection(index, collection, {}); + + should(Mutex.prototype.lock).be.called(); + should(Mutex.prototype.unlock).be.called(); + should(elasticsearch.client._hasHiddenCollection).be.calledWith(index); + should(elasticsearch.client.deleteCollection).be.calledWith( + index, + "_kuzzle_keep", + ); + }); + + it("should allow to set dynamic 
and _meta fields", async () => { + const mappings = { dynamic: "true", _meta: { some: "meta" } }; + + const result = await elasticsearch.client.createCollection( + index, + collection, + { + mappings, + }, + ); + + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ + index: indice, + body: { + aliases: { [alias]: {} }, + mappings: { + dynamic: "true", + _meta: { some: "meta" }, + properties: elasticsearch.config.commonMapping.properties, + }, + }, + }); + + should(result).be.null(); + }); + + it("should return a rejected promise if client.indices.create fails", () => { + elasticsearch.client._client.indices.create.rejects(esClientError); + + const promise = elasticsearch.client.createCollection(index, collection, { + mappings: { properties: { city: { type: "keyword" } } }, + }); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + it("should not reject when a race condition occur between exists and create methods", () => { + const esReject = new Error("foo"); + + esReject.meta = { + body: { + error: { + type: "resource_already_exists_exception", + }, + }, + }; + + elasticsearch.client._client.indices.create.rejects(esReject); + + const promise = elasticsearch.client.createCollection(index, collection, { + mappings: { properties: { city: { type: "keyword" } } }, + }); + + return should(promise) + .be.fulfilled() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).not.be.called(); + }); + }); + + it("should reject with BadRequestError on wrong mapping", async () => { + elasticsearch.client._checkMappings = _checkMappings; + + const mappings = { + dinamic: "false", + properties: { + freeman: { type: "keyword" }, + }, + }; + + global.NODE_ENV = "development"; + await should( + elasticsearch.client.createCollection(index, collection, { mappings }), + ).be.rejectedWith({ + message: + 'Invalid mapping property "mappings.dinamic". 
Did you mean "dynamic"?', + id: "services.storage.invalid_mapping", + }); + + global.NODE_ENV = "production"; + await should( + elasticsearch.client.createCollection(index, collection, { mappings }), + ).be.rejectedWith({ + message: 'Invalid mapping property "mappings.dinamic".', + id: "services.storage.invalid_mapping", + }); + }); + + it("should reject when an incorrect dynamic property value is provided", async () => { + const mappings1 = { + dynamic: null, + }; + const mappings2 = { + properties: { + user: { + properties: { + metadata: { + dynamic: "notTooMuch", + }, + }, + }, + }, + }; + const mappings3 = { + dynamic: true, + }; + + await elasticsearch.client.createCollection(index, collection, { + mappings: mappings3, + }); + + should(elasticsearch.client._checkMappings).be.calledWithMatch({ + dynamic: "true", + }); + + await should( + elasticsearch.client.createCollection(index, collection, { + mappings: mappings1, + }), + ).be.rejectedWith({ + message: /Dynamic property value should be a string./, + id: "services.storage.invalid_mapping", + }); + + await should( + elasticsearch.client.createCollection(index, collection, { + mappings: mappings2, + }), + ).be.rejectedWith({ + message: /Incorrect dynamic property value/, + id: "services.storage.invalid_mapping", + }); + }); + + it("should call updateCollection if the collection already exists", async () => { + const settings = { index: { blocks: { write: true } } }; + const mappings = { properties: { city: { type: "keyword" } } }; + elasticsearch.client.hasCollection.resolves(true); + sinon.stub(elasticsearch.client, "updateCollection").resolves({}); + + await elasticsearch.client.createCollection(index, collection, { + mappings, + settings, + }); + + should(elasticsearch.client.hasCollection).be.calledWith( + index, + collection, + ); + should(elasticsearch.client.updateCollection).be.calledWithMatch( + index, + collection, + { + settings: { index: { blocks: { write: true } } }, + mappings: { properties: { city: { type: "keyword" } } }, + }, + ); + }); + + it("should not overwrite kuzzle commonMapping", async () => { + elasticsearch.config.commonMapping = { + dynamic: "false", + properties: { + gordon: { type: "text" }, + _kuzzle_info: { + properties: { + author: { type: "text" }, + createdAt: { type: "date" }, + updatedAt: { type: "date" }, + updater: { type: "keyword" }, + }, + }, + }, + }; + const mappings = { + properties: { + gordon: { type: "keyword" }, + freeman: { type: "keyword" }, + _kuzzle_info: { + properties: { + author: { type: "keyword" }, + }, + }, + }, + }; + + await elasticsearch.client.createCollection(index, collection, { + mappings, + }); + + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0], + expectedMapping = { + _meta: undefined, + dynamic: "false", + properties: { + gordon: { type: "text" }, + freeman: { type: "keyword" }, + _kuzzle_info: { + properties: { + author: { type: "text" }, + createdAt: { type: "date" }, + updatedAt: { type: "date" }, + updater: { type: "keyword" }, + }, + }, + }, + }; + + should(esReq.body.mappings).eql(expectedMapping); + }); + + it("should reject if the index name is invalid", () => { + sinon.stub(elasticsearch.client, "isIndexNameValid").returns(false); + + return should( + elasticsearch.client.createCollection("foo", "bar"), + ).rejectedWith(BadRequestError, { + id: "services.storage.invalid_index_name", + }); + }); + + it("should reject if the collection name is invalid", () => { + sinon.stub(elasticsearch.client, 
"isCollectionNameValid").returns(false); + + return should( + elasticsearch.client.createCollection("foo", "bar"), + ).rejectedWith(BadRequestError, { + id: "services.storage.invalid_collection_name", + }); + }); + + it("should use defaultSettings if none are provided", async () => { + elasticsearch.config.defaultSettings = { + number_of_replicas: 42, + number_of_shards: 66, + }; + + await elasticsearch.client.createCollection(index, collection); + + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; + should(esReq.body.settings).eql(elasticsearch.config.defaultSettings); + }); + + it("should use provided settings if provided", async () => { + elasticsearch.config.defaultSettings = { + number_of_replicas: 42, + number_of_shards: 66, + }; + + const settings = { + number_of_replicas: 1, + number_of_shards: 2, + }; + + await elasticsearch.client.createCollection(index, collection, { + settings, + }); + + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; + should(esReq.body.settings).eql(settings); + }); + + it("should use partially provided settings", async () => { + elasticsearch.config.defaultSettings = { + number_of_replicas: 42, + number_of_shards: 66, + }; + + const settings = { + number_of_replicas: 1, + }; + + await elasticsearch.client.createCollection(index, collection, { + settings, + }); + + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; + + should(esReq.body.settings).eql({ + number_of_replicas: 1, + number_of_shards: 66, + }); + }); + + it("should wait for all shards to being active when using an Elasticsearch cluster", async () => { + elasticsearch.client._getWaitForActiveShards = sinon + .stub() + .returns("all"); + await elasticsearch.client.createCollection(index, collection); + + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; + + should(esReq.wait_for_active_shards).eql("all"); + }); + + it("should only wait for one shard to being active when using a single node", async () => { + elasticsearch.client._getWaitForActiveShards = sinon.stub().returns("1"); + await elasticsearch.client.createCollection(index, collection); + + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; + + should(esReq.wait_for_active_shards).eql("1"); + }); + }); + + describe("#getMapping", () => { + beforeEach(() => { + elasticsearch.client._client.indices.getMapping.resolves({ + body: { + [indice]: { + mappings: { + dynamic: true, + _meta: { lang: "npl" }, + properties: { + city: { type: "keyword" }, + _kuzzle_info: { properties: { author: { type: "keyword" } } }, + }, + }, + }, + }, + }); + + elasticsearch.client._esWrapper.getMapping = sinon + .stub() + .resolves({ foo: "bar" }); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); + }); + + afterEach(() => { + elasticsearch.client._getIndice.restore(); + }); + + it("should have getMapping capabilities", () => { + const promise = elasticsearch.client.getMapping(index, collection); + + return promise.then((result) => { + should( + elasticsearch.client._client.indices.getMapping, + ).be.calledWithMatch({ + index: indice, + }); + + should(result).match({ + dynamic: true, + _meta: { lang: "npl" }, + properties: { + city: { type: "keyword" }, + }, + }); + }); + }); + + it("should include kuzzleMeta if specified", () => { + const promise = elasticsearch.client.getMapping(index, collection, { + includeKuzzleMeta: true, + }); + + return promise.then((result) => { + should( + 
elasticsearch.client._client.indices.getMapping, + ).be.calledWithMatch({ + index: indice, + }); + + should(result).match({ + dynamic: true, + _meta: { lang: "npl" }, + properties: { + city: { type: "keyword" }, + _kuzzle_info: { properties: { author: { type: "keyword" } } }, + }, + }); + }); + }); + + it("should return a rejected promise if client.cat.indices fails", () => { + elasticsearch.client._client.indices.getMapping.rejects(esClientError); + + const promise = elasticsearch.client.getMapping(index, collection); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#updateCollection", () => { + let oldSettings, settings, mappings; + + beforeEach(() => { + oldSettings = { + body: { + [indice]: { + settings: { + index: { + creation_date: Date.now(), + provided_name: "hello_world", + uuid: "some-u-u-i-d", + version: { no: 4242 }, + blocks: { write: false }, + }, + }, + }, + }, + }; + settings = { index: { blocks: { write: true } } }; + mappings = { properties: { city: { type: "keyword" } } }; + + elasticsearch.client._client.indices.getSettings.resolves(oldSettings); + elasticsearch.client.updateMapping = sinon.stub().resolves(); + elasticsearch.client.updateSettings = sinon.stub().resolves(); + elasticsearch.client.updateSearchIndex = sinon.stub().resolves(); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); + }); + + afterEach(() => { + elasticsearch.client._getIndice.restore(); + }); + + it("should call updateSettings, updateMapping", async () => { + elasticsearch.client.getMapping = sinon.stub().resolves({ + dynamic: "true", + properties: { city: { type: "keyword" }, dynamic: "false" }, + }); + await elasticsearch.client.updateCollection(index, collection, { + mappings, + settings, + }); + + should(elasticsearch.client.updateSettings).be.calledWith( + index, + collection, + settings, + ); + should(elasticsearch.client.updateMapping).be.calledWith( + index, + collection, + mappings, + ); + }); + + it("should call updateSettings and updateMapping", async () => { + elasticsearch.client.getMapping = sinon.stub().resolves({ + dynamic: "false", + properties: { city: { type: "keyword" } }, + }); + await elasticsearch.client.updateCollection(index, collection, { + mappings, + settings, + }); + + should(elasticsearch.client.updateSettings).be.calledWith( + index, + collection, + settings, + ); + should(elasticsearch.client.updateMapping).be.calledWith( + index, + collection, + mappings, + ); + should(elasticsearch.client.updateSearchIndex).not.be.called(); + }); + + it("should revert settings if updateMapping fail", () => { + elasticsearch.client.getMapping = sinon.stub().resolves({ + dynamic: "true", + properties: { city: { type: "keyword" } }, + }); + elasticsearch.client.updateMapping.rejects(); + + const promise = elasticsearch.client.updateCollection(index, collection, { + mappings, + settings, + }); + + return should(promise) + .be.rejected() + .then(() => { + should( + elasticsearch.client._client.indices.getSettings, + ).be.calledWithMatch({ + index: indice, + }); + should(elasticsearch.client.updateSettings).be.calledTwice(); + should(elasticsearch.client.updateMapping).be.calledOnce(); + should(elasticsearch.client.updateSettings.getCall(1).args).be.eql([ + index, + collection, + { index: { blocks: { write: false } } }, + ]); + }); + }); + + it("should calls updateSearchIndex if dynamic change from false to true", async () => { + 
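// Editor's note: this case covers the transition of a field from dynamic: "false" to
// dynamic: true. Presumably the search index then has to be rebuilt so that values stored
// while the field was non-dynamic become searchable, which is why updateSearchIndex (a
// blanket update-by-query, see the #updateSearchIndex block further down) is expected to run
// exactly once here and not for the other mapping updates above.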
elasticsearch.client.getMapping = sinon.stub().resolves({ + properties: { + content: { + dynamic: "false", + }, + }, + }); + const newMappings = { + properties: { + content: { + dynamic: true, + }, + }, + }; + + await elasticsearch.client.updateCollection(index, collection, { + mappings: newMappings, + }); + + should(elasticsearch.client.updateSearchIndex).be.calledOnce(); + }); + }); + + describe("#updateMapping", () => { + let newMapping, existingMapping, _checkMappings; + + beforeEach(() => { + _checkMappings = elasticsearch.client._checkMappings; + + newMapping = { + properties: { + name: { type: "keyword" }, + }, + }; + + existingMapping = { + dynamic: "strict", + _meta: { meta: "data" }, + properties: { + city: { type: "keyword" }, + _kuzzle_info: { + properties: { + author: { type: "keyword" }, + }, + }, + }, + }; + + elasticsearch.client.getMapping = sinon.stub().resolves(existingMapping); + elasticsearch.client._client.indices.putMapping.resolves({}); + elasticsearch.client._checkMappings = sinon.stub().resolves(); + }); + + it("should have mapping capabilities", () => { + const promise = elasticsearch.client.updateMapping( + index, + collection, + newMapping, + ); + + return promise.then((result) => { + should( + elasticsearch.client._client.indices.putMapping, + ).be.calledWithMatch({ + index: alias, + body: { + dynamic: "strict", + _meta: { meta: "data" }, + properties: { + name: { type: "keyword" }, + }, + }, + }); + + should(result).match({ + dynamic: "strict", + _meta: { meta: "data" }, + properties: { + city: { type: "keyword" }, + name: { type: "keyword" }, + _kuzzle_info: { + properties: { + author: { type: "keyword" }, + }, + }, + }, + }); + }); + }); + + it("should reject with BadRequestError on wrong mapping", async () => { + elasticsearch.client._checkMappings = _checkMappings; + newMapping = { + dinamic: "false", + properties: { + freeman: { type: "keyword" }, + }, + }; + + global.NODE_ENV = "development"; + await should( + elasticsearch.client.updateMapping(index, collection, newMapping), + ).be.rejectedWith({ + message: + 'Invalid mapping property "mappings.dinamic". 
Did you mean "dynamic"?', + id: "services.storage.invalid_mapping", + }); + + global.NODE_ENV = "production"; + await should( + elasticsearch.client.updateMapping(index, collection, newMapping), + ).be.rejectedWith({ + message: 'Invalid mapping property "mappings.dinamic".', + id: "services.storage.invalid_mapping", + }); + }); + + it("should replace dynamic and _meta", () => { + existingMapping = { + dynamic: "true", + _meta: { some: "meta" }, + }; + newMapping = { + dynamic: "false", + _meta: { other: "meta" }, + }; + + const promise = elasticsearch.client.updateMapping( + index, + collection, + newMapping, + ); + + return promise.then((result) => { + should( + elasticsearch.client._client.indices.putMapping, + ).be.calledWithMatch({ + index: alias, + body: { + dynamic: "false", + _meta: { other: "meta" }, + }, + }); + + should(result).match({ + dynamic: "false", + _meta: { other: "meta" }, + }); + }); + }); + + it("should return a rejected promise if client.cat.indices fails", () => { + elasticsearch.client._client.indices.putMapping.rejects(esClientError); + + const promise = elasticsearch.client.updateMapping( + index, + collection, + newMapping, + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#updateSettings", () => { + let newSettings; + + beforeEach(() => { + newSettings = { + index: { + blocks: { + write: true, + }, + }, + }; + }); + + it("should allow to change indice settings", async () => { + const result = await elasticsearch.client.updateSettings( + index, + collection, + newSettings, + ); + + should( + elasticsearch.client._client.indices.putSettings, + ).be.calledWithMatch({ + index: alias, + body: { + index: { + blocks: { + write: true, + }, + }, + }, + }); + + should(result).be.null(); + }); + + it("should close then open the index when changing the analyzers", async () => { + newSettings.analysis = { + analyzer: { customer_analyzers: {} }, + }; + + await elasticsearch.client.updateSettings(index, collection, newSettings); + + should(elasticsearch.client._client.indices.close).be.calledWithMatch({ + index: alias, + }); + should(elasticsearch.client._client.indices.open).be.calledWithMatch({ + index: alias, + }); + }); + + it("should return a rejected promise if client.cat.putSettings fails", () => { + elasticsearch.client._client.indices.putSettings.rejects(esClientError); + + const promise = elasticsearch.client.updateSettings( + index, + collection, + newSettings, + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#updateSearchIndex", () => { + it("should call updateByQuery", async () => { + elasticsearch.client._client.updateByQuery = sinon.stub().resolves(); + + await elasticsearch.client.updateSearchIndex(index, collection); + + should(elasticsearch.client._client.updateByQuery).be.calledWithMatch({ + body: {}, + conflicts: "proceed", + index: alias, + refresh: true, + wait_for_completion: false, + }); + }); + }); + + describe("#truncateCollection", () => { + let existingMapping; + + beforeEach(() => { + existingMapping = { + dynamic: "false", + properties: { + name: { type: "keyword" }, + }, + }; + + elasticsearch.client.getMapping = sinon.stub().resolves(existingMapping); + + elasticsearch.client._client.indices.getSettings.resolves({ + body: { + "&nyc-open-data.yellow-taxi": { + settings: 
{ + analysis: { + analyzers: { + custom_analyzer: { + type: "simple", + }, + }, + }, + }, + }, + }, + }); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); + sinon.stub(elasticsearch.client, "_getWaitForActiveShards").resolves(1); + }); + + afterEach(() => { + elasticsearch.client._getIndice.restore(); + }); + + it("should delete and then create the collection with the same mapping", async () => { + const result = await elasticsearch.client.truncateCollection( + index, + collection, + ); + + should(elasticsearch.client.getMapping).be.calledWith(index, collection); + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ + index: indice, + }); + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ + index: indice, + body: { + aliases: { [alias]: {} }, + mappings: { + dynamic: "false", + properties: { + name: { type: "keyword" }, + }, + }, + settings: { + analysis: { + analyzers: { + custom_analyzer: { + type: "simple", + }, + }, + }, + }, + }, + }); + should( + elasticsearch.client._client.indices.getSettings, + ).be.calledWithMatch({ + index: indice, + }); + should(result).be.null(); + }); + + it("should return a rejected promise if client fails", () => { + elasticsearch.client._client.indices.delete.rejects(esClientError); + + const promise = elasticsearch.client.truncateCollection( + index, + collection, + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + it("should wait for all shards to be active when using an Elasticsearch cluster", async () => { + elasticsearch.client._getWaitForActiveShards = sinon + .stub() + .resolves("all"); + + await elasticsearch.client.truncateCollection(index, collection); + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; + + should(esReq.wait_for_active_shards).eql("all"); + }); + + it("should only wait for the primary shard to be active when using a single node", async () => { + elasticsearch.client._getWaitForActiveShards = sinon.stub().resolves("1"); + + await elasticsearch.client.truncateCollection(index, collection); + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; + + should(esReq.wait_for_active_shards).eql("1"); + }); + }); + + describe("#import", () => { + let getExpectedEsRequest, bulkReturnError, documents, bulkReturn; + + beforeEach(() => { + getExpectedEsRequest = ({ userId = null, refresh, timeout } = {}) => ({ + body: [ + { index: { _id: 1, _index: alias } }, + { + firstName: "foo", + _kuzzle_info: { + author: userId, + createdAt: timestamp, + updater: null, + updatedAt: null, + }, + }, + + { index: { _id: 2, _index: alias, _type: undefined } }, + { + firstName: "bar", + _kuzzle_info: { + author: userId, + createdAt: timestamp, + updater: null, + updatedAt: null, + }, + }, + + { update: { _id: 3, _index: alias } }, + { + doc: { + firstName: "foobar", + _kuzzle_info: { + updater: userId, + updatedAt: timestamp, + }, + }, + }, + + { delete: { _id: 4, _index: alias } }, + ], + refresh, + timeout, + }); + + bulkReturn = { + body: { + items: [ + { index: { status: 201, _id: 1, toto: 42 } }, + { index: { status: 201, _id: 2, toto: 42 } }, + { update: { status: 200, _id: 3, toto: 42 } }, + { delete: { status: 200, _id: 4, toto: 42 } }, + ], + errors: false, + }, + }; + + bulkReturnError = { + body: { + items: [ + { index: { status: 201, _id: 1, toto: 42 } }, + { index: { status: 201, _id: 2, toto: 42 } }, + { + 
update: { + status: 404, + _id: 42, + error: { type: "not_found", reason: "not found", toto: 42 }, + }, + }, + { + delete: { + status: 404, + _id: 21, + error: { type: "not_found", reason: "not found", toto: 42 }, + }, + }, + ], + errors: true, + }, + }; + + documents = [ + { index: { _id: 1, _index: "overwrite-me" } }, + { firstName: "foo" }, + + { index: { _id: 2, _type: "delete-me" } }, + { firstName: "bar" }, + + { update: { _id: 3 } }, + { doc: { firstName: "foobar" } }, + + { delete: { _id: 4 } }, + ]; + + elasticsearch.client._client.bulk.resolves(bulkReturn); + }); + + it("should support bulk data import", () => { + documents = [ + { index: { _id: 1 } }, + { firstName: "foo" }, + + { index: { _id: 2, _type: undefined } }, + { firstName: "bar" }, + + { update: { _id: 3 } }, + { doc: { firstName: "foobar" } }, + + { delete: { _id: 4 } }, + ]; + + const promise = elasticsearch.client.import(index, collection, documents); + + return promise.then((result) => { + should(elasticsearch.client._client.bulk).be.calledWithMatch( + getExpectedEsRequest(), + ); + + should(result).match({ + items: [ + { index: { status: 201, _id: 1 } }, + { index: { status: 201, _id: 2 } }, + { update: { status: 200, _id: 3 } }, + { delete: { status: 200, _id: 4 } }, + ], + errors: [], + }); + }); + }); + + it("should inject additional options to esRequest", () => { + const promise = elasticsearch.client.import( + index, + collection, + documents, + { + refresh: "wait_for", + timeout: "10m", + userId: "aschen", + }, + ); + + return promise.then(() => { + should(elasticsearch.client._client.bulk).be.calledWithMatch( + getExpectedEsRequest({ + refresh: "wait_for", + timeout: "10m", + userId: "aschen", + }), + ); + }); + }); + + it('should populate "errors" array for bulk data import with some errors', () => { + elasticsearch.client._client.bulk.resolves(bulkReturnError); + + const promise = elasticsearch.client.import(index, collection, documents); + + return promise.then((result) => { + should(result).match({ + items: [ + { index: { status: 201, _id: 1 } }, + { index: { status: 201, _id: 2 } }, + ], + errors: [ + { + update: { + status: 404, + _id: 42, + error: { type: "not_found", reason: "not found" }, + }, + }, + { + delete: { + status: 404, + _id: 21, + error: { type: "not_found", reason: "not found" }, + }, + }, + ], + }); + }); + }); + + it("should return a rejected promise if client fails", () => { + elasticsearch.client._client.bulk.rejects(esClientError); + + const promise = elasticsearch.client.import(index, collection, documents); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#listCollections", () => { + beforeEach(() => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { alias: "@&nepali.mehry" }, + { alias: "@&nepali.liia" }, + { alias: "@&nyc-open-data.taxi" }, + { alias: "@&nepali._kuzzle_keep" }, + ], + }); + }); + + it("should allow listing all available collections", () => { + const promise = elasticsearch.client.listCollections("nepali"); + + return promise.then((result) => { + should(result).match(["mehry", "liia"]); + }); + }); + + it("should not list unauthorized collections", () => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { alias: "@%nepali.mehry" }, + { alias: "@%nepali.liia" }, + { alias: "@%nyc-open-data.taxi" }, + ], + }); + + const promise = elasticsearch.client.listCollections("nepali"); + + return 
promise.then((result) => { + should(result).match([]); + }); + }); + + it("should return a rejected promise if client fails", async () => { + elasticsearch.client._client.cat.aliases.rejects(esClientError); + + await should(elasticsearch.client.listCollections(index)).be.rejected(); + + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + describe("#listIndexes", () => { + beforeEach(() => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { alias: "@&nepali.mehry" }, + { alias: "@&nepali.liia" }, + { alias: "@&nyc-open-data.taxi" }, + ], + }); + }); + + it("should allow listing all available indexes", () => { + const promise = elasticsearch.client.listIndexes(); + + return promise.then((result) => { + should(elasticsearch.client._client.cat.aliases).be.calledWithMatch({ + format: "json", + }); + + should(result).match(["nepali", "nyc-open-data"]); + }); + }); + + it("should not list unauthorized indexes", () => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { alias: "@%nepali.mehry" }, + { alias: "@%nepali.liia" }, + { alias: "@%nyc-open-data.taxi" }, + { alias: "@&vietnam.lfiduras" }, + ], + }); + + const promise = elasticsearch.client.listIndexes(); + + return promise.then((result) => { + should(result).match(["vietnam"]); + }); + }); + + it("should return a rejected promise if client fails", async () => { + elasticsearch.client._client.cat.aliases.rejects(esClientError); + + await should(elasticsearch.client.listIndexes()).be.rejected(); + + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + describe("#listAliases", () => { + beforeEach(() => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { index: "&nepalu.mehry", alias: "@&nepali.mehry" }, + { index: "&nepali.lia", alias: "@&nepali.liia" }, + { index: "&nyc-open-data.taxi", alias: "@&nyc-open-data.taxi" }, + ], + }); + }); + + it("should allow listing all available aliases", async () => { + const result = await elasticsearch.client.listAliases(); + + should(elasticsearch.client._client.cat.aliases).be.calledWithMatch({ + format: "json", + }); + + should(result).match([ + { + alias: "@&nepali.mehry", + index: "nepali", + collection: "mehry", + indice: "&nepalu.mehry", + }, + { + alias: "@&nepali.liia", + index: "nepali", + collection: "liia", + indice: "&nepali.lia", + }, + { + alias: "@&nyc-open-data.taxi", + index: "nyc-open-data", + collection: "taxi", + indice: "&nyc-open-data.taxi", + }, + ]); + }); + + it("should not list unauthorized aliases", async () => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { index: "%nepalu.mehry", alias: "@%nepali.mehry" }, + { index: "%nepali.lia", alias: "@%nepali.liia" }, + { index: "%nyc-open-data.taxi", alias: "@%nyc-open-data.taxi" }, + { index: "&vietnam.lfiduras", alias: "@&vietnam.lfiduras" }, + ], + }); + + const result = await elasticsearch.client.listAliases(); + + should(result).match([ + { + alias: "@&vietnam.lfiduras", + index: "vietnam", + collection: "lfiduras", + indice: "&vietnam.lfiduras", + }, + ]); + }); + + it("should return a rejected promise if client fails", async () => { + elasticsearch.client._client.cat.aliases.rejects(esClientError); + + await should(elasticsearch.client.listAliases()).be.rejected(); + + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + describe("#deleteIndexes", () => { + beforeEach(() => { + 
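// Editor's note: as the listAliases expectations above show, the physical indice name
// ("&nepalu.mehry") is decoupled from the alias ("@&nepali.mehry"); only the alias carries
// the logical index/collection pair exposed by Kuzzle. A minimal parsing sketch, assuming
// every alias follows the "@<scope><index>.<collection>" layout (illustrative only):
const sketchParseAlias = (aliasName) => {
  const [, scope, parsedIndex, parsedCollection] =
    aliasName.match(/^@([&%])([^.]+)\.(.+)$/) || [];
  return { scope, index: parsedIndex, collection: parsedCollection };
};
// sketchParseAlias("@&nepali.mehry")
//   => { scope: "&", index: "nepali", collection: "mehry" }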
elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { alias: "@&nepali.mehry", index: "&nepali.mehry" }, + { alias: "@&nepali.liia", index: "&nepali.liia" }, + { alias: "@&do-not.delete", index: "&do-not.delete" }, + { alias: "@&nyc-open-data.taxi", index: "&nyc-open-data.taxi" }, + ], + }); + }); + + it("should allow to deletes multiple indexes", () => { + const promise = elasticsearch.client.deleteIndexes([ + "nepali", + "nyc-open-data", + ]); + + return promise.then((result) => { + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ + index: ["&nepali.mehry", "&nepali.liia", "&nyc-open-data.taxi"], + }); + + should(result).match(["nepali", "nyc-open-data"]); + }); + }); + + it("should not delete unauthorized indexes", () => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { alias: "@&nepali.mehry", index: "&nepali.mehry" }, + { alias: "@&nepali.liia", index: "&nepali.liia" }, + { alias: "@&do-not.delete", index: "&do-not.delete" }, + { alias: "@%nyc-open-data.taxi", index: "%nyc-open-data.taxi" }, + ], + }); + + const promise = elasticsearch.client.deleteIndexes([ + "nepali", + "nyc-open-data", + ]); + + return promise.then((result) => { + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ + index: ["&nepali.mehry", "&nepali.liia"], + }); + + should(result).match(["nepali"]); + }); + }); + + it("should return a rejected promise if client fails", async () => { + elasticsearch.client._client.cat.aliases.rejects(esClientError); + + await should(elasticsearch.client.listIndexes()).be.rejected(); + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + + describe("#deleteIndex", () => { + it("should call deleteIndexes", () => { + elasticsearch.client.deleteIndexes = sinon.stub().resolves(); + + const promise = elasticsearch.client.deleteIndex("nepali"); + + return promise.then((result) => { + should(elasticsearch.client.deleteIndexes).be.calledWith(["nepali"]); + + should(result).be.null(); + }); + }); + }); + + describe("#deleteCollection", () => { + beforeEach(() => { + sinon.stub(elasticsearch.client, "_createHiddenCollection").resolves(); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); + sinon + .stub(elasticsearch.client, "_checkIfAliasExists") + .resolves(undefined); + }); + + afterEach(() => { + elasticsearch.client._getIndice.restore(); + }); + + it("should allow to delete a collection", async () => { + const result = await elasticsearch.client.deleteCollection( + index, + collection, + ); + + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ + index: indice, + }); + + should(result).be.null(); + + should(elasticsearch.client._createHiddenCollection).be.called(); + }); + + it("should create the hidden collection if the index is empty", async () => { + await elasticsearch.client.deleteCollection(index, collection); + + should(elasticsearch.client._createHiddenCollection).be.called(); + }); + + it("should delete the remaining alias if it still exists", async () => { + elasticsearch.client._checkIfAliasExists.resolves(["myalias"]); + elasticsearch.client._client.indices.deleteAlias = sinon + .stub() + .resolves(); + + await elasticsearch.client.deleteCollection(index, collection); + + should(elasticsearch.client._client.indices.deleteAlias).be.called(); + }); + }); + + describe("#refreshCollection", () => { + it("should send a valid request to es client", () => { + elasticsearch.client._client.indices.refresh.resolves({ + 
body: { _shards: "shards" }, + }); + + const promise = elasticsearch.client.refreshCollection(index, collection); + + return promise.then((result) => { + should(elasticsearch.client._client.indices.refresh).be.calledWithMatch( + { + index: alias, + }, + ); + + should(result).match({ + _shards: "shards", + }); + }); + }); + + it("should return a rejected promise if client fails", async () => { + elasticsearch.client._client.indices.refresh.rejects(esClientError); + + await should( + elasticsearch.client.refreshCollection(index, collection), + ).rejected(); + + should(elasticsearch.client._esWrapper.formatESError).calledWith( + esClientError, + ); + }); + }); + + describe("#exists", () => { + it("should have document exists capability", () => { + elasticsearch.client._client.exists.resolves({ + body: true, + }); + + const promise = elasticsearch.client.exists(index, collection, "liia"); + + return promise.then((result) => { + should(elasticsearch.client._client.exists).be.calledWithMatch({ + index: alias, + id: "liia", + }); + + should(result).be.eql(true); + }); + }); + + it("should return a rejected promise if client fails", () => { + elasticsearch.client._client.exists.rejects(esClientError); + + const promise = elasticsearch.client.exists(index, collection, "liia"); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#hasIndex", () => { + it("should call list indexes and return true if index exists", () => { + elasticsearch.client.listIndexes = sinon + .stub() + .resolves(["nepali", "nyc-open-data"]); + + const promise = elasticsearch.client.hasIndex("nepali"); + + return promise.then((result) => { + should(elasticsearch.client.listIndexes).be.called(); + + should(result).be.eql(true); + }); + }); + + it("should call list indexes and return false if index does not exist", () => { + elasticsearch.client.listIndexes = sinon + .stub() + .resolves(["nepali", "nyc-open-data"]); + + const promise = elasticsearch.client.hasIndex("vietnam"); + + return promise.then((result) => { + should(elasticsearch.client.listIndexes).be.called(); + + should(result).be.eql(false); + }); + }); + }); + + describe("#hasCollection", () => { + it("should call list collections and return true if collection exists", () => { + elasticsearch.client.listCollections = sinon + .stub() + .resolves(["liia", "mehry"]); + + const promise = elasticsearch.client.hasCollection("nepali", "liia"); + + return promise.then((result) => { + should(elasticsearch.client.listCollections).be.called(); + + should(result).be.eql(true); + }); + }); + + it("should call list collections and return false if collection does not exist", () => { + elasticsearch.client.listCollections = sinon + .stub() + .resolves(["liia", "mehry"]); + + const promise = elasticsearch.client.hasCollection("nepali", "lfiduras"); + + return promise.then((result) => { + should(elasticsearch.client.listCollections).be.called(); + + should(result).be.eql(false); + }); + }); + }); + + describe("#mCreate", () => { + let kuzzleMeta, mExecuteResult, documentsWithIds, documentsWithoutIds; + + beforeEach(() => { + kuzzleMeta = { + _kuzzle_info: { + author: null, + createdAt: timestamp, + updater: null, + updatedAt: null, + }, + }; + + documentsWithIds = [ + { body: { city: "Kathmandu" } }, + { _id: "liia", body: { city: "Ho Chi Minh City" } }, + ]; + + documentsWithoutIds = [ + { body: { city: "Kathmandu" } }, + { body: { city: "Ho Chi Minh 
City" } }, + ]; + + mExecuteResult = { items: [], errors: [] }; + + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); + }); + + it("should do a mGet request if we need to get some documents", () => { + elasticsearch.client._client.mget.resolves({ + body: { + docs: [], + }, + }); + + const promise = elasticsearch.client.mCreate( + index, + collection, + documentsWithIds, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).be.calledWithMatch({ + index: alias, + body: { docs: [{ _id: "liia", _source: false }] }, + }); + + const esRequest = { + index: alias, + body: [ + { index: { _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _index: alias } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [ + { _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should reject already existing documents", () => { + elasticsearch.client._client.mget.resolves({ + body: { + docs: [{ _id: "liia", found: true }], + }, + }); + + const promise = elasticsearch.client.mCreate( + index, + collection, + documentsWithIds, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).be.calledWithMatch({ + index: alias, + body: { docs: [{ _id: "liia", _source: false }] }, + }); + + const esRequest = { + index: alias, + body: [ + { index: { _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [{ _source: { city: "Kathmandu", ...kuzzleMeta } }]; + const rejected = [ + { + document: { + _id: "liia", + body: { _kuzzle_info: undefined, city: "Ho Chi Minh City" }, + }, + reason: "document already exists", + status: 400, + }, + ]; + + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + rejected, + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should not do a mGet request if we didn't need to get some documents", () => { + const promise = elasticsearch.client.mCreate( + index, + collection, + documentsWithoutIds, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).not.be.called(); + + const esRequest = { + index: alias, + body: [ + { index: { _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _index: alias } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [ + { _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should allow additional options", () => { + kuzzleMeta._kuzzle_info.author = "aschen"; + const promise = elasticsearch.client.mCreate( + index, + collection, + documentsWithoutIds, + { refresh: "wait_for", timeout: "10m", userId: "aschen" }, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).not.be.called(); + + const esRequest = { + index: alias, + body: [ + { index: { _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _index: alias } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, 
+ ], + refresh: "wait_for", + timeout: "10m", + }; + const toImport = [ + { _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + }); + + describe("#mCreateOrReplace", () => { + let kuzzleMeta, mExecuteResult, documents; + + beforeEach(() => { + kuzzleMeta = { + _kuzzle_info: { + author: null, + createdAt: timestamp, + updater: null, + updatedAt: null, + }, + }; + + documents = [ + { _id: "mehry", body: { city: "Kathmandu" } }, + { _id: "liia", body: { city: "Ho Chi Minh City" } }, + ]; + + mExecuteResult = { items: [], errors: [] }; + + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); + }); + + it("should call _mExecute with formated documents and source flag", async () => { + const promise = elasticsearch.client.mCreateOrReplace( + index, + collection, + documents, + { source: false }, + ); + + const result = await promise; + + const esRequest = { + index: alias, + body: [ + { index: { _index: alias, _id: "mehry" } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _index: alias, _id: "liia" } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + { source: false }, + ); + + should(result).match(mExecuteResult); + }); + + it("should call _mExecute with formated documents", () => { + const promise = elasticsearch.client.mCreateOrReplace( + index, + collection, + documents, + ); + + return promise.then((result) => { + const esRequest = { + index: alias, + body: [ + { index: { _index: alias, _id: "mehry" } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _index: alias, _id: "liia" } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should allow additional options", () => { + kuzzleMeta._kuzzle_info.author = "aschen"; + + const promise = elasticsearch.client.mCreateOrReplace( + index, + collection, + documents, + { refresh: "wait_for", timeout: "10m", userId: "aschen" }, + ); + + return promise.then((result) => { + const esRequest = { + index: alias, + body: [ + { index: { _index: alias, _id: "mehry" } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _index: alias, _id: "liia" } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, + ], + refresh: "wait_for", + timeout: "10m", + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should not inject kuzzle meta when specified", () => { + const promise = elasticsearch.client.mCreateOrReplace( + index, + collection, + documents, + { injectKuzzleMeta: 
false }, + ); + + return promise.then((result) => { + const esRequest = { + index: alias, + body: [ + { index: { _index: alias, _id: "mehry" } }, + { city: "Kathmandu" }, + { index: { _index: alias, _id: "liia" } }, + { city: "Ho Chi Minh City" }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu" } }, + { _id: "liia", _source: { city: "Ho Chi Minh City" } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + + it('should forward the "limits" option to mExecute', async () => { + await elasticsearch.client.mCreateOrReplace( + index, + collection, + documents, + { + limits: false, + }, + ); + + const options = elasticsearch.client._mExecute.getCall(0).args[3]; + should(options.limits).be.false(); + }); + }); + + describe("#mUpdate", () => { + let kuzzleMeta, mExecuteResult, documents; + + beforeEach(() => { + kuzzleMeta = { + _kuzzle_info: { + updater: null, + updatedAt: timestamp, + }, + }; + + documents = [ + { _id: "mehry", body: { city: "Kathmandu" } }, + { _id: "liia", body: { city: "Ho Chi Minh City" } }, + ]; + + mExecuteResult = { + items: [ + { + _id: "mehry", + _source: { city: "Kathmandu" }, + get: { _source: { age: 26, city: "Kathmandu" } }, + }, + { + _id: "liia", + _source: { city: "Ho Chi Minh City" }, + get: { _source: { age: 29, city: "Ho Chi Minh City" } }, + }, + ], + errors: [], + }; + + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); + }); + + it("should call _mExecute with formated documents", () => { + const promise = elasticsearch.client.mUpdate( + index, + collection, + documents, + ); + + return promise.then((result) => { + const esRequest = { + index: alias, + body: [ + { + update: { + _index: alias, + _id: "mehry", + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }, + }, + { doc: { city: "Kathmandu", ...kuzzleMeta }, _source: true }, + { + update: { + _index: alias, + _id: "liia", + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }, + }, + { doc: { city: "Ho Chi Minh City", ...kuzzleMeta }, _source: true }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match({ + items: [ + { + _id: "mehry", + _source: { city: "Kathmandu", age: 26 }, + }, + { + _id: "liia", + _source: { city: "Ho Chi Minh City", age: 29 }, + }, + ], + errors: [], + }); + }); + }); + + it("should allow additional options", () => { + kuzzleMeta._kuzzle_info.updater = "aschen"; + + const promise = elasticsearch.client.mUpdate( + index, + collection, + documents, + { + refresh: "wait_for", + retryOnConflict: 2, + timeout: "10m", + userId: "aschen", + }, + ); + + return promise.then(() => { + const esRequest = { + index: alias, + body: [ + { update: { _index: alias, _id: "mehry", retry_on_conflict: 2 } }, + { doc: { city: "Kathmandu", ...kuzzleMeta }, _source: true }, + { update: { _index: alias, _id: "liia", retry_on_conflict: 2 } }, + { doc: { city: "Ho Chi Minh City", ...kuzzleMeta }, _source: true }, + ], + refresh: "wait_for", + timeout: "10m", + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", 
_source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + }); + }); + + it("should add documents without ID to rejected documents", () => { + documents = [ + { _id: "mehry", body: { city: "Kathmandu" } }, + { body: { city: "Ho Chi Minh City" } }, + ]; + + const promise = elasticsearch.client.mUpdate( + index, + collection, + documents, + ); + + return promise.then(() => { + const esRequest = { + index: alias, + body: [ + { + update: { + _index: alias, + _id: "mehry", + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }, + }, + { doc: { city: "Kathmandu", ...kuzzleMeta }, _source: true }, + ], + refresh: undefined, + timeout: undefined, + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + ]; + const rejected = [ + { + document: { + _id: undefined, + body: { _kuzzle_info: undefined, city: "Ho Chi Minh City" }, + }, + reason: "document _id must be a string", + status: 400, + }, + ]; + + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + rejected, + ); + }); + }); + }); + + describe("#mUpsert", () => { + let documents; + let kuzzleUpdateMeta; + let kuzzleCreateMeta; + let esRequest; + let toImport; + let mExecuteResult; + + beforeEach(() => { + documents = [ + { _id: "mehry", changes: { city: "Kathmandu" } }, + { _id: "liia", changes: { city: "Ho Chi Minh City" } }, + ]; + + kuzzleUpdateMeta = { + _kuzzle_info: { + updater: null, + updatedAt: timestamp, + }, + }; + kuzzleCreateMeta = { + _kuzzle_info: { + author: null, + createdAt: timestamp, + }, + }; + + esRequest = { + body: [ + { + update: { + _index: alias, + _id: "mehry", + _source: true, + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }, + }, + { + doc: { city: "Kathmandu", ...kuzzleUpdateMeta }, + upsert: { city: "Kathmandu", ...kuzzleCreateMeta }, + }, + { + update: { + _index: alias, + _id: "liia", + _source: true, + retry_on_conflict: + elasticsearch.config.defaults.onUpdateConflictRetries, + }, + }, + { + doc: { city: "Ho Chi Minh City", ...kuzzleUpdateMeta }, + upsert: { city: "Ho Chi Minh City", ...kuzzleCreateMeta }, + }, + ], + refresh: undefined, + timeout: undefined, + }; + + toImport = [ + { + _id: "mehry", + _source: { + changes: { city: "Kathmandu", ...kuzzleUpdateMeta }, + default: { city: "Kathmandu", ...kuzzleCreateMeta }, + }, + }, + { + _id: "liia", + _source: { + changes: { city: "Ho Chi Minh City", ...kuzzleUpdateMeta }, + default: { city: "Ho Chi Minh City", ...kuzzleCreateMeta }, + }, + }, + ]; + + mExecuteResult = { + items: [ + { + _id: "mehry", + _source: { city: "Kathmandu" }, + created: false, + result: "updated", + get: { _source: { age: 26, city: "Kathmandu" } }, + }, + { + _id: "liia", + _source: { city: "Ho Chi Minh City" }, + created: false, + result: "updated", + get: { _source: { age: 29, city: "Ho Chi Minh City" } }, + }, + ], + errors: [], + }; + + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); + }); + + it("should call _mExecute with formated documents", async () => { + const result = await elasticsearch.client.mUpsert( + index, + collection, + documents, + ); + + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match({ + items: [ + { + _id: "mehry", + _source: { city: "Kathmandu", age: 26 }, + created: false, + }, + { + _id: "liia", + _source: { city: "Ho Chi Minh City", 
age: 29 }, + created: false, + }, + ], + errors: [], + }); + }); + + it("should handle default values for upserted documents", async () => { + documents[1].default = { country: "Vietnam" }; + esRequest.body[3].upsert.country = "Vietnam"; + toImport[1]._source.default.country = "Vietnam"; + + const result = await elasticsearch.client.mUpsert( + index, + collection, + documents, + ); + + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match({ + items: [ + { + _id: "mehry", + _source: { city: "Kathmandu", age: 26 }, + created: false, + }, + { + _id: "liia", + _source: { city: "Ho Chi Minh City", age: 29 }, + created: false, + }, + ], + errors: [], + }); + }); + + it("should allow additional options", async () => { + kuzzleUpdateMeta._kuzzle_info.updater = "aschen"; + kuzzleCreateMeta._kuzzle_info.author = "aschen"; + esRequest.body[0].update.retry_on_conflict = 42; + esRequest.body[2].update.retry_on_conflict = 42; + esRequest.refresh = "wait_for"; + esRequest.timeout = "10m"; + + await elasticsearch.client.mUpsert(index, collection, documents, { + refresh: "wait_for", + retryOnConflict: 42, + timeout: "10m", + userId: "aschen", + }); + + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + }); + + it("should add documents without ID to rejected documents", async () => { + documents[1] = { changes: { city: "Ho Chi Minh City" } }; + esRequest.body = esRequest.body.slice(0, 2); + toImport = toImport.slice(0, 1); + const rejected = [ + { + document: { changes: { city: "Ho Chi Minh City" } }, + reason: "document _id must be a string", + status: 400, + }, + ]; + + await elasticsearch.client.mUpsert(index, collection, documents); + + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + rejected, + ); + }); + + it('should return the right "_created" result on a document creation', async () => { + mExecuteResult.items[1].result = "created"; + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); + + const result = await elasticsearch.client.mUpsert( + index, + collection, + documents, + ); + + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match({ + items: [ + { + _id: "mehry", + _source: { city: "Kathmandu", age: 26 }, + created: false, + }, + { + _id: "liia", + _source: { city: "Ho Chi Minh City", age: 29 }, + created: true, + }, + ], + errors: [], + }); + }); + }); + + describe("#mReplace", () => { + let kuzzleMeta, mExecuteResult, documents; + + beforeEach(() => { + kuzzleMeta = { + _kuzzle_info: { + author: null, + createdAt: timestamp, + updater: null, + updatedAt: null, + }, + }; + + documents = [ + { _id: "mehry", body: { city: "Kathmandu" } }, + { _id: "liia", body: { city: "Ho Chi Minh City" } }, + ]; + + mExecuteResult = { items: [], errors: [] }; + + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); + + elasticsearch.client._client.mget.resolves({ + body: { + docs: [ + { _id: "mehry", found: true }, + { _id: "liia", found: true }, + ], + }, + }); + }); + + it("should get documents and then format them for _mExecute", () => { + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).be.calledWithMatch({ + index: alias, + body: { + docs: [ + { _id: "mehry", _source: false }, + { _id: "liia", _source: false }, + ], + }, + }); + + 
const esRequest = { + refresh: undefined, + timeout: undefined, + body: [ + { index: { _id: "mehry", _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _id: "liia", _index: alias } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, + ], + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should add not found documents to rejected", () => { + elasticsearch.client._client.mget.resolves({ + body: { + docs: [ + { _id: "mehry", found: true }, + { _id: "liia", found: false }, + ], + }, + }); + + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).be.calledWithMatch({ + index: alias, + body: { + docs: [ + { _id: "mehry", _source: false }, + { _id: "liia", _source: false }, + ], + }, + }); + + const esRequest = { + refresh: undefined, + timeout: undefined, + body: [ + { index: { _id: "mehry", _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + ], + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + ]; + const rejected = [ + { + document: { + _id: "liia", + body: { _kuzzle_info: undefined, city: "Ho Chi Minh City" }, + }, + reason: "document not found", + status: 404, + }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + rejected, + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should add documents without an ID to rejected", () => { + documents = [ + { _id: "mehry", body: { city: "Kathmandu" } }, + { body: { city: "Ho Chi Minh City" } }, + ]; + elasticsearch.client._client.mget.resolves({ + body: { + docs: [{ _id: "mehry", found: true }], + }, + }); + + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.mget).be.calledWithMatch({ + index: alias, + body: { + docs: [{ _id: "mehry", _source: false }], + }, + }); + + const esRequest = { + refresh: undefined, + timeout: undefined, + body: [ + { index: { _id: "mehry", _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + ], + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + ]; + const rejected = [ + { + document: { body: { city: "Ho Chi Minh City" } }, + reason: "document _id must be a string", + status: 400, + }, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + rejected, + ); + + should(result).match(mExecuteResult); + }); + }); + + it("should allow additional options", () => { + kuzzleMeta._kuzzle_info.author = "aschen"; + + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + { + refresh: "wait_for", + timeout: "10m", + userId: "aschen", + }, + ); + + return promise.then((result) => { + const esRequest = { + refresh: "wait_for", + timeout: "10m", + body: [ + { index: { _id: "mehry", _index: alias } }, + { city: "Kathmandu", ...kuzzleMeta }, + { index: { _id: "liia", _index: alias } }, + { city: "Ho Chi Minh City", ...kuzzleMeta }, + ], + }; + const toImport = [ + { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, + { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } 
}, + ]; + should(elasticsearch.client._mExecute).be.calledWithMatch( + esRequest, + toImport, + [], + ); + + should(result).match(mExecuteResult); + }); + }); + }); + + describe("#mDelete", () => { + let documentIds; + + beforeEach(() => { + documentIds = ["mehry", "liia"]; + + elasticsearch.client._getAllDocumentsFromQuery = sinon.stub().resolves([ + { _id: "mehry", _source: { city: "Kathmandu" } }, + { _id: "liia", _source: { city: "Ho Chi Minh City" } }, + ]); + + elasticsearch.client._client.deleteByQuery.resolves({ + body: { + total: 2, + deleted: 2, + failures: [], + }, + }); + + elasticsearch.client._client.indices.refresh.resolves({ + body: { _shards: 1 }, + }); + + elasticsearch.client.mGet = sinon.stub().resolves({ + items: [ + { _id: "mehry", _source: { city: "Kathmandu" } }, + { _id: "liia", _source: { city: "Ho Chi Minh City" } }, + ], + }); + }); + + it("should allow to delete multiple documents with deleteByQuery", async () => { + const result = await elasticsearch.client.mDelete( + index, + collection, + documentIds, + ); + + should(elasticsearch.client._client.indices.refresh).be.calledWith({ + index: `@&${index}.${collection}`, + }); + + should(elasticsearch.client.mGet).be.calledWithMatch(index, collection, [ + "mehry", + "liia", + ]); + + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ + index: alias, + body: { query: { ids: { values: ["mehry", "liia"] } } }, + scroll: "5s", + }); + + should(result).match({ + documents: [ + { _id: "mehry", _source: { city: "Kathmandu" } }, + { _id: "liia", _source: { city: "Ho Chi Minh City" } }, + ], + errors: [], + }); + }); + + it("should add non existing documents to rejected", () => { + elasticsearch.client.mGet = sinon.stub().resolves({ + items: [{ _id: "mehry", _source: { city: "Kathmandu" } }], + }); + + const promise = elasticsearch.client.mDelete( + index, + collection, + documentIds, + ); + + return promise.then((result) => { + should(elasticsearch.client.mGet).be.calledWithMatch( + index, + collection, + ["mehry", "liia"], + ); + + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ + index: alias, + body: { query: { ids: { values: ["mehry"] } } }, + scroll: "5s", + }); + + should(result).match({ + documents: [{ _id: "mehry", _source: { city: "Kathmandu" } }], + errors: [{ _id: "liia", reason: "document not found", status: 404 }], + }); + }); + }); + + it("should add document with ID non string to rejected", () => { + elasticsearch.client.mGet = sinon.stub().resolves({ + items: [{ _id: "mehry", _source: { city: "Kathmandu" } }], + }); + + const promise = elasticsearch.client.mDelete(index, collection, [ + "mehry", + 42, + ]); + + return promise.then((result) => { + should(elasticsearch.client.mGet).be.calledWithMatch( + index, + collection, + ["mehry"], + ); + + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ + index: alias, + body: { query: { ids: { values: ["mehry"] } } }, + scroll: "5s", + }); + + should(result).match({ + documents: [{ _id: "mehry", _source: { city: "Kathmandu" } }], + errors: [ + { _id: 42, reason: "document _id must be a string", status: 400 }, + ], + }); + }); + }); + + it("should allow additional options", () => { + const promise = elasticsearch.client.mDelete( + index, + collection, + documentIds, + { + refresh: "wait_for", + }, + ); + + return promise.then(() => { + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ + index: alias, + body: { query: { ids: { values: ["mehry", "liia"] } } }, + scroll: "5s", + 
refresh: true, + }); + }); + }); + + describe("#_mExecute", () => { + let esRequest, documents, partialErrors; + + beforeEach(() => { + esRequest = { + refresh: undefined, + body: [ + { index: { _index: alias, _id: "liia" } }, + { city: "Kathmandu" }, + { update: { _index: alias, _id: "mehry" } }, + { doc: { city: "Kathmandu" } }, + ], + }; + + documents = [ + { _id: "liia", _source: { city: "Kathmandu" } }, + { _id: "mehry", _source: { city: "Ho Chi Minh City" } }, + ]; + + partialErrors = [ + { + document: { body: { some: "document" } }, + status: 400, + reason: "some reason", + }, + ]; + + elasticsearch.client._client.bulk.resolves({ + body: { + items: [ + { + index: { + _id: "liia", + status: 201, + _version: 1, + result: "created", + created: true, + foo: "bar", + }, + }, + { + index: { + _id: "mehry", + status: 400, + error: { reason: "bad request" }, + bar: "foo", + }, + }, + ], + }, + }); + }); + + it("should call client.bulk and separate success from errors", () => { + const promise = elasticsearch.client._mExecute( + esRequest, + documents, + partialErrors, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.bulk).be.calledWithMatch(esRequest); + + const expectedResult = [ + { + _id: "liia", + _source: { city: "Kathmandu" }, + status: 201, + _version: 1, + created: true, + result: "created", + }, + ]; + const expectedErrors = [ + { + document: { body: { some: "document" } }, + status: 400, + reason: "some reason", + }, + { + document: { _id: "mehry", _source: { city: "Ho Chi Minh City" } }, + status: 400, + reason: "bad request", + }, + ]; + should(result).match({ + items: expectedResult, + errors: expectedErrors, + }); + }); + }); + + it("should not call bulk if there are no documents", () => { + const promise = elasticsearch.client._mExecute( + esRequest, + [], + partialErrors, + ); + + return promise.then((result) => { + should(elasticsearch.client._client.bulk).not.be.called(); + + const expectedErrors = [ + { + document: { body: { some: "document" } }, + reason: "some reason", + }, + ]; + should(result).match({ + items: [], + errors: expectedErrors, + }); + }); + }); + + it("should reject if the document limit is reached", () => { + kuzzle.config.limits.documentsWriteCount = 1; + + const promise = elasticsearch.client._mExecute( + esRequest, + documents, + partialErrors, + ); + + return should(promise).be.rejectedWith({ + id: "services.storage.write_limit_exceeded", + }); + }); + + it('should not reject if the documents limit is reached but the "limits" option is false', () => { + kuzzle.config.limits.documentsWriteCount = 1; + + const promise = elasticsearch.client._mExecute( + esRequest, + documents, + partialErrors, + { limits: false }, + ); + + return should(promise).be.fulfilled(); + }); + + it("should return a rejected promise if client fails", () => { + elasticsearch.client._client.bulk.rejects(esClientError); + + const promise = elasticsearch.client._mExecute( + esRequest, + documents, + partialErrors, + ); + + return should(promise) + .be.rejected() + .then(() => { + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( + esClientError, + ); + }); + }); + }); + + describe("#_extractMDocuments", () => { + it("should add documents without a body to the rejected array", () => { + const documents = [ + { _id: "liia", body: { city: "Kathmandu" } }, + { _id: "no-body" }, + ]; + const kuzzleMeta = { + _kuzzle_info: { + author: null, + createdAt: timestamp, + updater: null, + updatedAt: null, + }, + }; + + const { rejected, 
extractedDocuments } = + elasticsearch.client._extractMDocuments(documents, kuzzleMeta); + + should(rejected).match([ + { + document: { _id: "no-body" }, + reason: "document body must be an object", + }, + ]); + + should(extractedDocuments).match([ + { + _id: "liia", + _source: { city: "Kathmandu" }, + }, + ]); + }); + }); + + describe("#isIndexNameValid", () => { + it("should allow a valid index name", () => { + should(elasticsearch.client.isIndexNameValid("foobar")).be.true(); + }); + + it("should not allow empty index names", () => { + should(elasticsearch.client.isIndexNameValid("")).be.false(); + }); + + it("should not allow uppercase chars", () => { + should(elasticsearch.client.isIndexNameValid("bAr")).be.false(); + }); + + it("should not allow index names that are too long", () => { + return should( + elasticsearch.client.isIndexNameValid("Ӣ".repeat(64)), + ).be.false(); + }); + + it("should not allow forbidden chars in the name", () => { + const forbidden = '\\/*?"<>| \t\r\n,#:%.&'; + + for (let i = 0; i < forbidden.length; i++) { + const name = `foo${forbidden[i]}bar`; + + should(elasticsearch.client.isIndexNameValid(name)).be.false(); + } + }); + }); + + describe("#isCollectionNameValid", () => { + it("should allow a valid collection name", () => { + should(elasticsearch.client.isCollectionNameValid("foobar")).be.true(); + }); + + it("should not allow empty collection names", () => { + should(elasticsearch.client.isCollectionNameValid("")).be.false(); + }); + + it("should not allow uppercase chars", () => { + should(elasticsearch.client.isCollectionNameValid("bAr")).be.false(); + }); + + it("should not allow collection names that are too long", () => { + return should( + elasticsearch.client.isCollectionNameValid("Ӣ".repeat(64)), + ).be.false(); + }); + + it("should not allow forbidden chars in the name", () => { + const forbidden = '\\/*?"<>| \t\r\n,#:%.&'; + + for (let i = 0; i < forbidden.length; i++) { + const name = `foo${forbidden[i]}bar`; + + should(elasticsearch.client.isCollectionNameValid(name)).be.false(); + } + }); + }); + + describe("#getSchema", () => { + beforeEach(() => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [ + { alias: "@&nepali.mehry" }, + { alias: "@&nepali._kuzzle_keep" }, + { alias: "@&istanbul._kuzzle_keep" }, + ], + }); + }); + + it("should returns the DB schema without hidden collections", async () => { + const schema = await elasticsearch.client.getSchema(); + should(schema).be.eql({ + nepali: ["mehry"], + istanbul: [], + }); + }); + }); + + describe("#_createHiddenCollection", () => { + const hiddenIndice = "&nisantasi._kuzzle_keep"; + const hiddenAlias = `@${hiddenIndice}`; + + beforeEach(() => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [], + }); + + sinon + .stub(elasticsearch.client, "_getAvailableIndice") + .resolves(hiddenIndice); + sinon.stub(elasticsearch.client, "_getWaitForActiveShards").returns(1); + }); + + afterEach(() => { + elasticsearch.client._getAvailableIndice.restore(); + }); + + it("creates the hidden collection", async () => { + elasticsearch.client._client.indices.create.resolves({}); + + await elasticsearch.client._createHiddenCollection("nisantasi"); + + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ + index: hiddenIndice, + body: { + aliases: { [hiddenAlias]: {} }, + settings: { + number_of_shards: 1, + number_of_replicas: 1, + }, + }, + }); + should(Mutex.prototype.lock).be.called(); + should(Mutex.prototype.unlock).be.called(); + }); + + it("does not 
create the hidden collection if it already exists", async () => { + elasticsearch.client._client.cat.aliases.resolves({ + body: [{ alias: hiddenAlias }], + }); + + await elasticsearch.client._createHiddenCollection("nisantasi"); + + should(elasticsearch.client._client.indices.create).not.be.called(); + }); + + it("creates the hidden collection based on global settings", async () => { + elasticsearch.client._client.indices.create.resolves({}); + elasticsearch.config.defaultSettings = { + number_of_shards: 42, + number_of_replicas: 42, + }; + + await elasticsearch.client._createHiddenCollection("nisantasi"); + + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ + index: hiddenIndice, + body: { + aliases: { [hiddenAlias]: {} }, + settings: { + number_of_shards: 42, + number_of_replicas: 42, + }, + }, + }); + should(Mutex.prototype.lock).be.called(); + should(Mutex.prototype.unlock).be.called(); + }); + + it("should wait for all shards to be active when using an Elasticsearch cluster", async () => { + elasticsearch.client._client.indices.create.resolves({}); + elasticsearch.client._getWaitForActiveShards = sinon + .stub() + .returns("all"); + await elasticsearch.client._createHiddenCollection("nisantasi"); + + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ + index: hiddenIndice, + body: { + aliases: { [hiddenAlias]: {} }, + settings: { + number_of_shards: 1, + number_of_replicas: 1, + }, + }, + wait_for_active_shards: "all", + }); + }); + + it("should wait for only one shard to be active when using a single node Elasticsearch cluster", async () => { + elasticsearch.client._client.indices.create.resolves({}); + elasticsearch.client._getWaitForActiveShards = sinon.stub().returns(1); + await elasticsearch.client._createHiddenCollection("nisantasi"); + + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ + index: hiddenIndice, + body: { + aliases: { [hiddenAlias]: {} }, + settings: { + number_of_shards: 1, + number_of_replicas: 1, + }, + }, + wait_for_active_shards: 1, + }); + }); + }); + + describe("#_checkMappings", () => { + it("should throw when a property is incorrect", () => { + const mapping2 = { + type: "nested", + properties: {}, + }; + const mapping = { + properties: {}, + dinamic: "false", + }; + + global.NODE_ENV = "development"; + should(() => elasticsearch.client._checkMappings(mapping)).throw({ + message: + 'Invalid mapping property "mappings.dinamic". Did you mean "dynamic"?', + id: "services.storage.invalid_mapping", + }); + + should(() => elasticsearch.client._checkMappings(mapping2)).throw({ + message: 'Invalid mapping property "mappings.type".', + id: "services.storage.invalid_mapping", + }); + }); + + it("should throw when a nested property is incorrect", () => { + const mapping = { + dynamic: "false", + properties: { + name: { type: "keyword" }, + car: { + dinamic: "false", + properties: { + brand: { type: "keyword" }, + }, + }, + }, + }; + + global.NODE_ENV = "development"; + should(() => elasticsearch.client._checkMappings(mapping)).throw({ + message: + 'Invalid mapping property "mappings.properties.car.dinamic". 
Did you mean "dynamic"?', + id: "services.storage.invalid_mapping", + }); + + global.NODE_ENV = "production"; + should(() => elasticsearch.client._checkMappings(mapping)).throw({ + message: 'Invalid mapping property "mappings.properties.car.dinamic".', + id: "services.storage.invalid_mapping", + }); + }); + + it("should return null if no properties are incorrect", () => { + const mapping = { + dynamic: "false", + properties: { + name: { type: "keyword" }, + car: { + dynamic: "false", + dynamic_templates: {}, + type: "nested", + properties: { + brand: { type: "keyword" }, + }, + }, + }, + }; + + should(() => elasticsearch.client._checkMappings(mapping)).not.throw(); + }); + }); + + describe("Collection emulation utils", () => { + let internalES; + let publicES; + + beforeEach(async () => { + publicES = new ES(kuzzle.config.services.storageEngine); + internalES = new ES( + kuzzle.config.services.storageEngine, + scopeEnum.PRIVATE, + ); + + sinon.stub(publicES.client, "waitForElasticsearch").resolves(); + sinon.stub(internalES.client, "waitForElasticsearch").resolves(); + publicES.client._client = new ESClientMock("7.0.0"); + internalES.client._client = new ESClientMock("7.0.0"); + + await publicES.init(); + await internalES.init(); + }); + + describe("#_getAlias", () => { + it("return alias name for a collection", () => { + const publicAlias = publicES.client._getAlias("nepali", "liia"); + const internalAlias = internalES.client._getAlias("nepali", "mehry"); + + should(publicAlias).be.eql("@&nepali.liia"); + should(internalAlias).be.eql("@%nepali.mehry"); + }); + }); + + describe("#_getIndice", () => { + let publicBody; + let privateBody; + + it("return the indice name associated to an alias (index+collection)", async () => { + publicBody = [ + { alias: "@&nepali.liia", index: "&nepali.lia", filter: 0 }, + ]; + privateBody = [ + { alias: "@%nepali.mehry", index: "%nepalu.mehry", filter: 0 }, + ]; + publicES.client._client.cat.aliases.resolves({ body: publicBody }); + internalES.client._client.cat.aliases.resolves({ body: privateBody }); + + const publicIndice = await publicES.client._getIndice("nepali", "liia"); + const internalIndice = await internalES.client._getIndice( + "nepali", + "mehry", + ); + + should(publicIndice).be.eql("&nepali.lia"); + should(internalIndice).be.eql("%nepalu.mehry"); + }); + + it("throw if there is no indice associated with the alias", async () => { + publicES.client._client.cat.aliases.resolves({ body: [] }); + internalES.client._client.cat.aliases.resolves({ body: [] }); + + await should( + publicES.client._getIndice("nepali", "liia"), + ).be.rejectedWith({ + id: "services.storage.unknown_index_collection", + }); + + await should( + internalES.client._getIndice("nepali", "mehry"), + ).be.rejectedWith({ + id: "services.storage.unknown_index_collection", + }); + }); + + it("throw if there is more than one indice associated with the alias", async () => { + publicBody = [ + { alias: "@&nepali.liia", index: "&nepali.lia", filter: 0 }, + { alias: "@&nepali.liia", index: "&nepali.liia", filter: 0 }, + ]; + privateBody = [ + { alias: "@%nepali.mehry", index: "%nepalu.mehry", filter: 0 }, + { alias: "@%nepali.mehry", index: "%nepali.mehry", filter: 0 }, + ]; + publicES.client._client.cat.aliases.resolves({ body: publicBody }); + internalES.client._client.cat.aliases.resolves({ body: privateBody }); + + await should( + publicES.client._getIndice("nepali", "liia"), + ).be.rejectedWith({ + id: "services.storage.multiple_indice_alias", + }); + + await should( + 
internalES.client._getIndice("nepali", "mehry"), + ).be.rejectedWith({ + id: "services.storage.multiple_indice_alias", + }); + }); + }); + + describe("#_getAvailableIndice", () => { + it("return simple indice whenever it is possible", async () => { + publicES.client._client.indices.exists.resolves({ body: false }); + internalES.client._client.indices.exists.resolves({ body: false }); + + const publicIndice = await publicES.client._getAvailableIndice( + "nepali", + "liia", + ); + const internalIndice = await internalES.client._getAvailableIndice( + "nepali", + "_kuzzle_keep", + ); + + should(publicIndice).be.eql("&nepali.liia"); + should(internalIndice).be.eql("%nepali._kuzzle_keep"); + }); + + it("return a suffixed indice if necessary (indice already taken)", async () => { + publicES.client._client.indices.exists + .onFirstCall() + .resolves({ body: true }) + .resolves({ body: false }); + + internalES.client._client.indices.exists + .onFirstCall() + .resolves({ body: true }) + .resolves({ body: false }); + + const publicIndice = await publicES.client._getAvailableIndice( + "nepali", + "liia", + ); + + const internalIndice = await internalES.client._getAvailableIndice( + "nepali", + "mehry", + ); + + should(publicIndice).match(new RegExp("&nepali.liia\\.\\d+")); + should(internalIndice).match(new RegExp("%nepali.mehry\\.\\d+")); + }); + + it("return a truncated and suffixed indice if necessary (indice + suffix too long)", async () => { + const longIndex = + "averyveryverylongindexwhichhasexactlythemaximumlengthacceptedofonehundredandtwentysixcharactersandthatiswaytoolongdontyouthink"; + const longCollection = + "averyverylongcollectionwhichhasexactlythemaximumlengthacceptedofonehundredandtwentysixcharactersandthatswaytoolongdontyouthink"; + + publicES.client._client.indices.exists + .onFirstCall() + .resolves({ body: true }) + .resolves({ body: false }); + internalES.client._client.indices.exists + .onFirstCall() + .resolves({ body: true }) + .resolves({ body: false }); + + const publicIndice = await publicES.client._getAvailableIndice( + longIndex, + longCollection, + ); + const internalIndice = await internalES.client._getAvailableIndice( + longIndex, + longCollection, + ); + + const publicIndiceCaptureSuffix = new RegExp(`(\\d+)`).exec( + publicIndice, + )[0].length; + const internalIndiceCaptureSuffix = new RegExp(`(\\d+)`).exec( + internalIndice, + )[0].length; + + should(publicIndice).match( + new RegExp( + `&${longIndex}.${longCollection.substr(0, longCollection.length - publicIndiceCaptureSuffix)}\\.\\d+`, + ), + ); + should(internalIndice).match( + new RegExp( + `%${longIndex}.${longCollection.substr(0, longCollection.length - internalIndiceCaptureSuffix)}\\.\\d+`, + ), + ); + + should(publicIndice).match( + (value) => Buffer.from(value).length === 255, + ); + should(internalIndice).match( + (value) => Buffer.from(value).length === 255, + ); + }); + }); + + describe("#_getAliasFromIndice", () => { + let publicBody; + let privateBody; + + it("return the list of alias associated with an indice", async () => { + publicBody = { + ["&nepali.lia"]: { + aliases: { + ["@&nepali.liia"]: {}, + }, + }, + }; + privateBody = { + ["%nepalu.mehry"]: { + aliases: { + ["@%nepali.mehry"]: {}, + }, + }, + }; + publicES.client._client.indices.getAlias.resolves({ body: publicBody }); + internalES.client._client.indices.getAlias.resolves({ + body: privateBody, + }); + + const publicIndice = + await publicES.client._getAliasFromIndice("&nepali.lia"); + const internalIndice = + await 
internalES.client._getAliasFromIndice("%nepalu.mehry"); + + should(publicIndice).be.eql(["@&nepali.liia"]); + should(internalIndice).be.eql(["@%nepali.mehry"]); + }); + + it("throw if there is no alias associated with the indice", async () => { + publicBody = { + ["&nepali.lia"]: { + aliases: {}, + }, + }; + privateBody = { + ["%nepalu.mehry"]: { + aliases: {}, + }, + }; + publicES.client._client.indices.getAlias.resolves({ body: publicBody }); + internalES.client._client.indices.getAlias.resolves({ + body: privateBody, + }); + + await should( + publicES.client._getAliasFromIndice("&nepali.lia"), + ).be.rejectedWith({ id: "services.storage.unknown_index_collection" }); + + await should( + internalES.client._getAliasFromIndice("%nepalu.mehry"), + ).be.rejectedWith({ id: "services.storage.unknown_index_collection" }); + }); + + it("should not throw if there is more than one alias associated with the indice", async () => { + publicBody = { + ["&nepali.lia"]: { + aliases: { + ["@&nepali.liia"]: {}, + ["@&nepali.lia"]: {}, + }, + }, + }; + privateBody = { + ["%nepalu.mehry"]: { + aliases: { + ["@%nepali.mehry"]: {}, + ["@%nepalu.mehry"]: {}, + }, + }, + }; + publicES.client._client.indices.getAlias.resolves({ body: publicBody }); + internalES.client._client.indices.getAlias.resolves({ + body: privateBody, + }); + + await should( + publicES.client._getAliasFromIndice("&nepali.lia"), + ).not.be.rejectedWith({ id: "services.storage.multiple_indice_alias" }); + + await should( + internalES.client._getAliasFromIndice("%nepalu.mehry"), + ).not.be.rejectedWith({ id: "services.storage.multiple_indice_alias" }); + }); + + it('should not throw if there is more than one alias associated with the indice but the aliases are not prefixed with "@"', async () => { + publicBody = { + ["&nepali.lia"]: { + aliases: { + ["@&nepali.liia"]: {}, + ["&nepali.lia"]: {}, + }, + }, + }; + privateBody = { + ["%nepalu.mehry"]: { + aliases: { + ["@%nepali.mehry"]: {}, + ["%nepalu.mehry"]: {}, + }, + }, + }; + publicES.client._client.indices.getAlias.resolves({ body: publicBody }); + internalES.client._client.indices.getAlias.resolves({ + body: privateBody, + }); + + await should( + publicES.client._getAliasFromIndice("&nepali.lia"), + ).not.be.rejectedWith({ id: "services.storage.multiple_indice_alias" }); + + await should( + internalES.client._getAliasFromIndice("%nepalu.mehry"), + ).not.be.rejectedWith({ id: "services.storage.multiple_indice_alias" }); + }); + }); + + describe("#_getWaitForActiveShards", () => { + it("should return all if an Elasticsearch cluster is used", async () => { + elasticsearch.client._client.cat.nodes = sinon + .stub() + .resolves({ body: ["node1", "node2"] }); + + const waitForActiveShards = + await elasticsearch.client._getWaitForActiveShards(); + + should(waitForActiveShards).be.eql("all"); + }); + + it("should return 1 if a single node Elasticsearch cluster is used", async () => { + elasticsearch.client._client.cat.nodes = sinon + .stub() + .resolves({ body: ["node1"] }); + + const waitForActiveShards = + await elasticsearch.client._getWaitForActiveShards(); + + should(waitForActiveShards).be.eql("1"); + }); + }); + + describe("#generateMissingAliases", () => { + const indicesBody = { + body: [ + { index: "&nepali.liia", status: "open" }, + { index: "%nepali.liia", status: "open" }, + { index: "&nepali.mehry", status: "open" }, + { index: "%nepali.mehry", status: "open" }, + ], + }; + let aliasesList = [ + { + alias: "@&nepali.lia", + index: "nepali", + collection: "lia", + indice: 
"&nepali.liia", + }, + ]; + + beforeEach(() => { + publicES.client._client.indices.updateAliases.resolves(); + internalES.client._client.indices.updateAliases.resolves(); + + publicES.client._client.cat.indices.resolves(indicesBody); + internalES.client._client.cat.indices.resolves(indicesBody); + + sinon.stub(publicES.client, "listAliases").resolves(aliasesList); + sinon.stub(internalES.client, "listAliases").resolves(aliasesList); + }); + + afterEach(() => { + publicES.client.listAliases.restore(); + internalES.client.listAliases.restore(); + }); + + it("Find indices without associated aliases and create some accordingly", async () => { + await publicES.client.generateMissingAliases(); + await internalES.client.generateMissingAliases(); + + should(publicES.client._client.indices.updateAliases).be.calledWith({ + body: { + actions: [ + { add: { alias: "@&nepali.mehry", index: "&nepali.mehry" } }, + ], + }, + }); + should(internalES.client._client.indices.updateAliases).be.calledWith({ + body: { + actions: [ + { add: { alias: "@%nepali.liia", index: "%nepali.liia" } }, + { add: { alias: "@%nepali.mehry", index: "%nepali.mehry" } }, + ], + }, + }); + }); + + it("do nothing when every indice is associated with an alias", async () => { + aliasesList = [ + { + alias: "@&nepali.lia", + index: "nepali", + collection: "lia", + indice: "&nepali.liia", + }, + { + alias: "@%nepali.lia", + index: "nepali", + collection: "lia", + indice: "%nepali.liia", + }, + { + alias: "@&nepalu.mehry", + index: "nepalu", + collection: "mehry", + indice: "&nepali.mehry", + }, + { + alias: "@%nepalu.mehry", + index: "nepalu", + collection: "mehry", + indice: "%nepali.mehry", + }, + ]; + + publicES.client.listAliases.resolves(aliasesList); + internalES.client.listAliases.resolves(aliasesList); + + await publicES.client.generateMissingAliases(); + await internalES.client.generateMissingAliases(); + + should(publicES.client._client.indices.updateAliases).not.be.called(); + should(internalES.client._client.indices.updateAliases).not.be.called(); + }); + }); + + describe("#_extractIndex", () => { + it("extract the index from alias", () => { + const publicIndex = publicES.client._extractIndex("@&nepali.liia"); + const internalIndex = internalES.client._extractIndex("@%nepali.liia"); + + should(publicIndex).be.eql("nepali"); + should(internalIndex).be.eql("nepali"); + }); + }); + + describe("#_extractCollection", () => { + it("extract the collection from alias", () => { + const publicCollection = + publicES.client._extractCollection("@&nepali.liia"); + const publicCollection2 = + publicES.client._extractCollection("@&vietnam.lfiduras"); + const publicCollection3 = + publicES.client._extractCollection("@&vietnam.l"); + const publicCollection4 = publicES.client._extractCollection( + "@&vietnam.iamaverylongcollectionnamebecauseiworthit", + ); + const internalCollection = + internalES.client._extractCollection("@%nepali.liia"); + + should(publicCollection).be.eql("liia"); + should(publicCollection2).be.eql("lfiduras"); + should(publicCollection3).be.eql("l"); + should(publicCollection4).be.eql( + "iamaverylongcollectionnamebecauseiworthit", + ); + should(internalCollection).be.eql("liia"); + }); + }); + + describe("#_extractSchema", () => { + it("should extract the list of indexes and their collections", () => { + const aliases = [ + "@%nepali.liia", + "@%nepali.mehry", + + "@&nepali.panipokari", + "@&nepali._kuzzle_keep", + "@&vietnam.lfiduras", + "@&vietnam._kuzzle_keep", + ]; + + const publicSchema = 
publicES.client._extractSchema(aliases); + const internalSchema = internalES.client._extractSchema(aliases); + + should(internalSchema).be.eql({ + nepali: ["liia", "mehry"], + }); + should(publicSchema).be.eql({ + nepali: ["panipokari"], + vietnam: ["lfiduras"], + }); + }); + + it("should include hidden collection with the option", () => { + const aliases = [ + "@%nepali.liia", + "@%nepali.mehry", + + "@&nepali.panipokari", + "@&nepali._kuzzle_keep", + "@&vietnam.lfiduras", + "@&vietnam._kuzzle_keep", + ]; + + const publicSchema = publicES.client._extractSchema(aliases, { + includeHidden: true, + }); + const internalSchema = internalES.client._extractSchema(aliases, { + includeHidden: true, + }); + + should(internalSchema).be.eql({ + nepali: ["liia", "mehry"], + }); + should(publicSchema).be.eql({ + nepali: ["panipokari", "_kuzzle_keep"], + vietnam: ["lfiduras", "_kuzzle_keep"], + }); + }); + }); + + describe("#_sanitizeSearchBody", () => { + let searchBody; + + it("should return the same query if all top level keywords are valid", () => { + searchBody = {}; + for (const key of publicES.client.searchBodyKeys) { + searchBody[key] = { foo: "bar" }; + } + + const result = publicES.client._sanitizeSearchBody( + Object.assign({}, searchBody), + ); + + should(result).be.deepEqual(searchBody); + }); + + it("should throw if any top level keyword is not in the white list", () => { + searchBody = { + unknown: {}, + }; + + should(() => publicES.client._sanitizeSearchBody(searchBody)).throw( + BadRequestError, + { id: "services.storage.invalid_search_query" }, + ); + }); + + it("should throw if any script keyword is found in the query (even deeply nested)", () => { + searchBody = { + query: { + bool: { + filter: [ + { + script: { + script: { + inline: + "doc[message.keyword].value.length() > params.length", + params: { + length: 25, + }, + }, + }, + }, + ], + }, + }, + }; + + should(() => publicES.client._sanitizeSearchBody(searchBody)).throw( + BadRequestError, + { id: "services.storage.invalid_query_keyword" }, + ); + }); + + it("should turn empty queries into match_all queries", () => { + searchBody = { + query: {}, + }; + + const result = publicES.client._sanitizeSearchBody(searchBody); + + should(result).be.deepEqual({ query: { match_all: {} } }); + }); + }); + + describe("#_scriptCheck", () => { + it("should allows stored-scripts", () => { + const searchParams = { + query: { + match: { + script: { + id: "count-documents", + params: { + length: 25, + }, + }, + }, + }, + }; + + should(() => publicES.client._scriptCheck(searchParams)).not.throw(); + }); + + it("should not throw when there is not a single script", () => { + const searchParams = { foo: "bar" }; + + should(() => publicES.client._scriptCheck(searchParams)).not.throw(); + }); + + it("should throw if any script is found in the query", () => { + let searchParams = { + query: { + match: { + script: { + inline: "doc[message.keyword].value.length() > params.length", + params: { + length: 25, + }, + }, + }, + }, + }; + + should(() => publicES.client._sanitizeSearchBody(searchParams)).throw( + BadRequestError, + { id: "services.storage.invalid_query_keyword" }, + ); + + searchParams = { + query: { + match: { + script: { + source: "doc[message.keyword].value.length() > params.length", + params: { + length: 25, + }, + }, + }, + }, + }; + + should(() => publicES.client._sanitizeSearchBody(searchParams)).throw( + BadRequestError, + { id: "services.storage.invalid_query_keyword" }, + ); + }); + + it("should throw if any deeply nested script keyword 
is found in the query", () => { + const searchParams = { + query: { + bool: { + filter: [ + { + script: { + script: { + inline: + "doc[message.keyword].value.length() > params.length", + params: { + length: 25, + }, + }, + }, + }, + ], + }, + }, + }; + + should(() => publicES.client._sanitizeSearchBody(searchParams)).throw( + BadRequestError, + { id: "services.storage.invalid_query_keyword" }, + ); + }); + }); + }); +}); diff --git a/test/service/storage/elasticsearch.test.js b/test/service/storage/elasticsearch-8.test.js similarity index 69% rename from test/service/storage/elasticsearch.test.js rename to test/service/storage/elasticsearch-8.test.js index af3d9f78ee..052032c0bc 100644 --- a/test/service/storage/elasticsearch.test.js +++ b/test/service/storage/elasticsearch-8.test.js @@ -13,7 +13,6 @@ const { } = require("../../../index"); const KuzzleMock = require("../../mocks/kuzzle.mock"); const ESClientMock = require("../../mocks/service/elasticsearchClient.mock"); -const { randomNumberMock } = require("../../mocks/name-generator.mock"); const scopeEnum = require("../../../lib/core/storage/storeScopeEnum"); const { Mutex } = require("../../../lib/util/mutex"); @@ -30,10 +29,9 @@ describe("Test: ElasticSearch service", () => { let ES; before(() => { - mockRequire("../../../lib/util/name-generator", { - randomNumber: randomNumberMock, - }); - ES = mockRequire.reRequire("../../../lib/service/storage/elasticsearch"); + ES = mockRequire.reRequire( + "../../../lib/service/storage/Elasticsearch", + ).Elasticsearch; }); after(() => { @@ -42,6 +40,7 @@ describe("Test: ElasticSearch service", () => { beforeEach(async () => { kuzzle = new KuzzleMock(); + kuzzle.config.services.storageEngine.majorVersion = 8; index = "nyc-open-data"; collection = "yellow-taxi"; @@ -51,12 +50,12 @@ describe("Test: ElasticSearch service", () => { esClientError = new Error("es client fail"); - ES.buildClient = () => new ESClientMock(); elasticsearch = new ES(kuzzle.config.services.storageEngine); + elasticsearch.client._client = new ESClientMock("8.0.0"); await elasticsearch.init(); - elasticsearch._esWrapper = { + elasticsearch.client._esWrapper = { reject: sinon.spy((error) => Promise.reject(error)), formatESError: sinon.spy((error) => error), }; @@ -73,38 +72,39 @@ describe("Test: ElasticSearch service", () => { describe("#constructor", () => { it("should initialize properties", () => { - const esPublic = new ES(kuzzle.config.services.storageEngine); const esInternal = new ES( kuzzle.config.services.storageEngine, scopeEnum.PRIVATE, ); - should(esPublic.config).be.exactly(kuzzle.config.services.storageEngine); - should(esPublic._indexPrefix).be.eql("&"); - should(esInternal._indexPrefix).be.eql("%"); + sinon.stub(esInternal.client, "waitForElasticsearch").resolves(); + esInternal.client._client = new ESClientMock("7.0.0"); + + should(elasticsearch.config).be.exactly( + kuzzle.config.services.storageEngine, + ); + should(elasticsearch.client._indexPrefix).be.eql("&"); + should(esInternal.client._indexPrefix).be.eql("%"); }); }); describe("#init", () => { it("should initialize properly", () => { - elasticsearch = new ES(kuzzle.config.services.storageEngine); - elasticsearch._buildClient = () => new ESClientMock(); - const promise = elasticsearch.init(); return should(promise) .be.fulfilledWith() .then(() => { - should(elasticsearch._client).not.be.null(); - should(elasticsearch._esWrapper).not.be.null(); - should(elasticsearch.esVersion).not.be.null(); + should(elasticsearch.client._client).not.be.null(); + 
should(elasticsearch.client._esWrapper).not.be.null(); + should(elasticsearch.client.esVersion).not.be.null(); }); }); }); describe("#stats", () => { beforeEach(() => { - elasticsearch._client.indices.stats.resolves({ + elasticsearch.client._client.indices.stats.resolves({ indices: { "%kuzzle.users": { total: { docs: { count: 1 }, store: { size_in_bytes: 10 } }, @@ -119,17 +119,17 @@ describe("Test: ElasticSearch service", () => { total: { docs: { count: 2 }, store: { size_in_bytes: 42 } }, }, ".geoip_databases": { - /* This index nativement do not return anything on index:stats call */ + /* This index natively does not return anything on index:stats call */ }, }, }); sinon - .stub(elasticsearch, "_getAliasFromIndice") + .stub(elasticsearch.client, "_getAliasFromIndice") .callsFake((indiceArg) => [`@${indiceArg}`]); }); afterEach(() => { - elasticsearch._getAliasFromIndice.restore(); + elasticsearch.client._getAliasFromIndice.restore(); }); it("should only request required stats from underlying client", async () => { @@ -137,15 +137,15 @@ describe("Test: ElasticSearch service", () => { metric: ["docs", "store"], }; - await elasticsearch.stats(); + await elasticsearch.client.stats(); - should(elasticsearch._client.indices.stats) + should(elasticsearch.client._client.indices.stats) .calledOnce() .calledWithMatch(esRequest); }); it("should as default ignore private and hidden indices", async () => { - const result = await elasticsearch.stats(); + const result = await elasticsearch.client.stats(); should(result).be.match({ size: 20, @@ -184,7 +184,7 @@ describe("Test: ElasticSearch service", () => { }), ); - elasticsearch._client.scroll.resolves({ + elasticsearch.client._client.scroll.resolves({ _scroll_id: "azerty", hits: { hits: [ @@ -195,15 +195,15 @@ describe("Test: ElasticSearch service", () => { }, }); - elasticsearch._getAliasFromIndice = sinon.stub(); - elasticsearch._getAliasFromIndice + elasticsearch.client._getAliasFromIndice = sinon.stub(); + elasticsearch.client._getAliasFromIndice .withArgs("&foo.foo") .returns(["@&foo.foo"]); - elasticsearch._getAliasFromIndice + elasticsearch.client._getAliasFromIndice .withArgs("&bar.bar") .returns(["@&bar.bar"]); - const result = await elasticsearch.scroll("i-am-scroll-id", { + const result = await elasticsearch.client.scroll("i-am-scroll-id", { scrollTTL: "10s", }); @@ -234,9 +234,11 @@ describe("Test: ElasticSearch service", () => { { ttl: 10000 }, ); - should(elasticsearch._client.clearScroll).not.called(); + should(elasticsearch.client._client.clearScroll).not.called(); - should(elasticsearch._client.scroll.firstCall.args[0]).be.deepEqual({ + should( + elasticsearch.client._client.scroll.firstCall.args[0], + ).be.deepEqual({ scroll: "10s", scroll_id: "i-am-scroll-id", }); @@ -280,7 +282,7 @@ describe("Test: ElasticSearch service", () => { }), ); - elasticsearch._client.scroll.resolves({ + elasticsearch.client._client.scroll.resolves({ hits: { hits: [ { _index: "&foo.foo", _id: "foo", _source: {} }, @@ -291,15 +293,15 @@ describe("Test: ElasticSearch service", () => { _scroll_id: "azerty", }); - elasticsearch._getAliasFromIndice = sinon.stub(); - elasticsearch._getAliasFromIndice + elasticsearch.client._getAliasFromIndice = sinon.stub(); + elasticsearch.client._getAliasFromIndice .withArgs("&foo.foo") .returns(["@&foo.foo"]); - elasticsearch._getAliasFromIndice + elasticsearch.client._getAliasFromIndice .withArgs("&bar.bar") .returns(["@&bar.bar"]); - const result = await elasticsearch.scroll("i-am-scroll-id", { + const result = await 
elasticsearch.client.scroll("i-am-scroll-id", { scrollTTL: "10s", }); @@ -310,11 +312,13 @@ describe("Test: ElasticSearch service", () => { should(kuzzle.ask).not.calledWith("core:cache:internal:store"); should(kuzzle.ask).calledWith("core:cache:internal:del", redisKey); - should(elasticsearch._client.clearScroll) + should(elasticsearch.client._client.clearScroll) .calledOnce() .calledWithMatch({ scroll_id: "azerty" }); - should(elasticsearch._client.scroll.firstCall.args[0]).be.deepEqual({ + should( + elasticsearch.client._client.scroll.firstCall.args[0], + ).be.deepEqual({ scroll: "10s", scroll_id: "i-am-scroll-id", }); @@ -342,33 +346,37 @@ describe("Test: ElasticSearch service", () => { }); it("should reject promise if a scroll fails", async () => { - elasticsearch._client.scroll.rejects(esClientError); + elasticsearch.client._client.scroll.rejects(esClientError); kuzzle.ask.withArgs("core:cache:internal:get").resolves("1"); - await should(elasticsearch.scroll("i-am-scroll-id")).be.rejected(); + await should(elasticsearch.client.scroll("i-am-scroll-id")).be.rejected(); - should(elasticsearch._esWrapper.formatESError).calledWith(esClientError); + should(elasticsearch.client._esWrapper.formatESError).calledWith( + esClientError, + ); }); it("should reject if the scrollId does not exists in Kuzzle cache", async () => { kuzzle.ask.withArgs("core:cache:internal:get").resolves(null); - await should(elasticsearch.scroll("i-am-scroll-id")).be.rejectedWith({ + await should( + elasticsearch.client.scroll("i-am-scroll-id"), + ).be.rejectedWith({ id: "services.storage.unknown_scroll_id", }); - should(elasticsearch._client.scroll).not.be.called(); + should(elasticsearch.client._client.scroll).not.be.called(); }); it("should reject if the scroll duration is too great", async () => { - elasticsearch._config.maxScrollDuration = "21m"; + elasticsearch.client._config.maxScrollDuration = "21m"; await should( - elasticsearch.scroll("i-am-scroll-id", { scrollTTL: "42m" }), + elasticsearch.client.scroll("i-am-scroll-id", { scrollTTL: "42m" }), ).be.rejectedWith({ id: "services.storage.scroll_duration_too_great" }); - should(elasticsearch._client.scroll).not.be.called(); + should(elasticsearch.client._client.scroll).not.be.called(); }); it("should default an explicitly null scrollTTL argument", async () => { @@ -382,12 +390,12 @@ describe("Test: ElasticSearch service", () => { }), ); - elasticsearch._client.scroll.resolves({ + elasticsearch.client._client.scroll.resolves({ hits: { hits: [], total: { value: 1000 } }, _scroll_id: "azerty", }); - await elasticsearch.scroll("scroll-id", { scrollTTL: null }); + await elasticsearch.client.scroll("scroll-id", { scrollTTL: null }); should(cacheStub).calledOnce(); should(kuzzle.ask).calledWith( @@ -401,7 +409,9 @@ describe("Test: ElasticSearch service", () => { sinon.match.object, ); - should(elasticsearch._client.scroll.firstCall.args[0]).be.deepEqual({ + should( + elasticsearch.client._client.scroll.firstCall.args[0], + ).be.deepEqual({ scroll: elasticsearch.config.defaults.scrollTTL, scroll_id: "scroll-id", }); @@ -416,10 +426,10 @@ describe("Test: ElasticSearch service", () => { }); it("should join multi indexes and collections when specified with targets", async () => { - elasticsearch._client.search.rejects(new Error()); // Skip rest of the execution + elasticsearch.client._client.search.rejects(new Error()); // Skip rest of the execution try { - await elasticsearch.search({ + await elasticsearch.client.search({ targets: [ { index: "nyc-open-data", @@ -435,7 
+445,7 @@ describe("Test: ElasticSearch service", () => { } catch (error) { // Catch error since we throw to skip the rest of the execution } finally { - should(elasticsearch._client.search.firstCall.args[0]).match({ + should(elasticsearch.client._client.search.firstCall.args[0]).match({ index: "@&nyc-open-data.yellow-taxi,@&nyc-open-data.red-taxi,@&nyc-close-data.green-taxi,@&nyc-close-data.blue-taxi", query: { match_all: {} }, @@ -448,7 +458,7 @@ describe("Test: ElasticSearch service", () => { }); it("should be able to search documents", async () => { - elasticsearch._client.search.resolves({ + elasticsearch.client._client.search.resolves({ ...searchBody, aggregations: { some: "aggregs" }, hits: { @@ -480,16 +490,18 @@ describe("Test: ElasticSearch service", () => { _scroll_id: "i-am-scroll-id", }); - elasticsearch._getAliasFromIndice = sinon.stub(); - elasticsearch._getAliasFromIndice.withArgs(indice).returns([alias]); + elasticsearch.client._getAliasFromIndice = sinon.stub(); + elasticsearch.client._getAliasFromIndice + .withArgs(indice) + .returns([alias]); - const result = await elasticsearch.search({ + const result = await elasticsearch.client.search({ index, collection, searchBody, }); - should(elasticsearch._client.search.firstCall.args[0]).match({ + should(elasticsearch.client._client.search.firstCall.args[0]).match({ index: alias, query: { match_all: {} }, from: undefined, @@ -537,17 +549,17 @@ describe("Test: ElasticSearch service", () => { }); it("should be able to search with from/size and scroll arguments", async () => { - elasticsearch._client.search.resolves({ + elasticsearch.client._client.search.resolves({ hits: { hits: [], total: { value: 0 } }, _scroll_id: "i-am-scroll-id", }); - await elasticsearch.search( + await elasticsearch.client.search( { index, collection, searchBody }, { from: 0, scroll: "30s", size: 1 }, ); - should(elasticsearch._client.search.firstCall.args[0]).match({ + should(elasticsearch.client._client.search.firstCall.args[0]).match({ ...searchBody, from: 0, index: alias, @@ -569,17 +581,17 @@ describe("Test: ElasticSearch service", () => { }); it("should be able to search on ES alias with invalid collection name", async () => { - elasticsearch._client.search.resolves({ + elasticsearch.client._client.search.resolves({ hits: { hits: [], total: { value: 0 } }, }); - await elasticsearch.search({ + await elasticsearch.client.search({ index: "main", collection: "kuzzleData", searchBody, }); - should(elasticsearch._client.search.firstCall.args[0]).match({ + should(elasticsearch.client._client.search.firstCall.args[0]).match({ ...searchBody, index: "@&main.kuzzleData", track_total_hits: true, @@ -587,13 +599,13 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if a search fails", async () => { - elasticsearch._client.search.rejects(esClientError); + elasticsearch.client._client.search.rejects(esClientError); await should( - elasticsearch.search({ index, collection, searchBody }), + elasticsearch.client.search({ index, collection, searchBody }), ).be.rejected(); - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -605,24 +617,24 @@ describe("Test: ElasticSearch service", () => { }; return should( - elasticsearch.search({ index, collection, searchBody }), + elasticsearch.client.search({ index, collection, searchBody }), ).be.rejectedWith({ id: "services.storage.invalid_search_query" }); }); it("should not save the 
scrollId in the cache if not present in response", async () => { - elasticsearch._client.search.resolves({ + elasticsearch.client._client.search.resolves({ hits: { hits: [], total: { value: 0 } }, }); - await elasticsearch.search({ index, collection, searchBody: {} }); + await elasticsearch.client.search({ index, collection, searchBody: {} }); should(kuzzle.ask).not.calledWith("core:cache:internal:store"); }); it("should return a rejected promise if the scroll duration is too great", async () => { - elasticsearch._config.maxScrollDuration = "21m"; + elasticsearch.client._config.maxScrollDuration = "21m"; - const promise = elasticsearch.search( + const promise = elasticsearch.client.search( { index, collection, searchBody }, { scroll: "42m" }, ); @@ -631,22 +643,22 @@ describe("Test: ElasticSearch service", () => { id: "services.storage.scroll_duration_too_great", }); - should(elasticsearch._client.search).not.be.called(); + should(elasticsearch.client._client.search).not.be.called(); }); }); describe("#get", () => { it("should allow getting a single document", () => { - elasticsearch._client.get.resolves({ + elasticsearch.client._client.get.resolves({ _id: "liia", _source: { city: "Kathmandu" }, _version: 1, }); - const promise = elasticsearch.get(index, collection, "liia"); + const promise = elasticsearch.client.get(index, collection, "liia"); return promise.then((result) => { - should(elasticsearch._client.get).be.calledWithMatch({ + should(elasticsearch.client._client.get).be.calledWithMatch({ index: alias, id: "liia", }); @@ -660,7 +672,7 @@ describe("Test: ElasticSearch service", () => { }); it("should reject requests when the user search for a document with id _search", () => { - const promise = elasticsearch.get(index, collection, "_search"); + const promise = elasticsearch.client.get(index, collection, "_search"); return should(promise).be.rejectedWith({ id: "services.storage.search_as_an_id", @@ -668,14 +680,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if a get fails", () => { - elasticsearch._client.get.rejects(esClientError); + elasticsearch.client._client.get.rejects(esClientError); - const promise = elasticsearch.get(index, collection, "liia"); + const promise = elasticsearch.client.get(index, collection, "liia"); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -684,7 +696,7 @@ describe("Test: ElasticSearch service", () => { describe("#mGet", () => { it("should allow getting multiples documents", () => { - elasticsearch._client.mget.resolves({ + elasticsearch.client._client.mget.resolves({ docs: [ { _id: "liia", @@ -696,10 +708,13 @@ describe("Test: ElasticSearch service", () => { ], }); - const promise = elasticsearch.mGet(index, collection, ["liia", "mhery"]); + const promise = elasticsearch.client.mGet(index, collection, [ + "liia", + "mhery", + ]); return promise.then((result) => { - should(elasticsearch._client.mget).be.calledWithMatch({ + should(elasticsearch.client._client.mget).be.calledWithMatch({ docs: [ { _id: "liia", _index: alias }, { _id: "mhery", _index: alias }, @@ -714,14 +729,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.mget fails", () => { - elasticsearch._client.mget.rejects(esClientError); + elasticsearch.client._client.mget.rejects(esClientError); - const promise = 
elasticsearch.mGet(index, collection, ["liia"]); + const promise = elasticsearch.client.mGet(index, collection, ["liia"]); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -730,17 +745,20 @@ describe("Test: ElasticSearch service", () => { describe("#mExists", () => { it("should allow getting multiples existing documents", () => { - elasticsearch._client.mget.resolves({ + elasticsearch.client._client.mget.resolves({ docs: [ { _id: "foo", found: true }, { _id: "bar", found: false }, ], }); - const promise = elasticsearch.mExists(index, collection, ["foo", "bar"]); + const promise = elasticsearch.client.mExists(index, collection, [ + "foo", + "bar", + ]); return promise.then((result) => { - should(elasticsearch._client.mget).be.calledWithMatch({ + should(elasticsearch.client._client.mget).be.calledWithMatch({ docs: [{ _id: "foo" }, { _id: "bar" }], index: alias, }); @@ -753,14 +771,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.mget fails", () => { - elasticsearch._client.mget.rejects(esClientError); + elasticsearch.client._client.mget.rejects(esClientError); - const promise = elasticsearch.mExists(index, collection, ["foo"]); + const promise = elasticsearch.client.mExists(index, collection, ["foo"]); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -774,14 +792,14 @@ describe("Test: ElasticSearch service", () => { match_all: {}, }, }; - elasticsearch._client.count.resolves({ + elasticsearch.client._client.count.resolves({ count: 42, }); - const promise = elasticsearch.count(index, collection, filter); + const promise = elasticsearch.client.count(index, collection, filter); return promise.then((result) => { - should(elasticsearch._client.count).be.calledWithMatch({ + should(elasticsearch.client._client.count).be.calledWithMatch({ ...filter, index: alias, }); @@ -791,14 +809,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if count fails", () => { - elasticsearch._client.count.rejects(esClientError); + elasticsearch.client._client.count.rejects(esClientError); - const promise = elasticsearch.count(index, collection); + const promise = elasticsearch.client.count(index, collection); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -807,13 +825,13 @@ describe("Test: ElasticSearch service", () => { describe("#create", () => { it("should allow creating document an ID is provided", () => { - elasticsearch._client.index.resolves({ + elasticsearch.client._client.index.resolves({ _id: "liia", _version: 1, _source: { city: "Kathmandu" }, }); - const promise = elasticsearch.create( + const promise = elasticsearch.client.create( index, collection, { city: "Kathmandu" }, @@ -821,7 +839,7 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, document: { city: "Kathmandu", @@ -844,18 +862,18 @@ describe("Test: ElasticSearch service", () => { }); it("should 
create a document when no ID is provided", () => { - elasticsearch._client.index.resolves({ + elasticsearch.client._client.index.resolves({ _id: "mehry", _version: 1, _source: { city: "Panipokari" }, }); - const promise = elasticsearch.create(index, collection, { + const promise = elasticsearch.client.create(index, collection, { city: "Panipokari", }); return promise.then((result) => { - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, document: { city: "Panipokari", @@ -877,7 +895,7 @@ describe("Test: ElasticSearch service", () => { describe("#createOrReplace", () => { beforeEach(() => { - elasticsearch._client.index.resolves({ + elasticsearch.client._client.index.resolves({ _id: "liia", _version: 1, _source: { city: "Kathmandu" }, @@ -886,7 +904,7 @@ describe("Test: ElasticSearch service", () => { }); it("should support createOrReplace capability", () => { - const promise = elasticsearch.createOrReplace( + const promise = elasticsearch.client.createOrReplace( index, collection, "liia", @@ -895,7 +913,7 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, document: { city: "Kathmandu", @@ -920,7 +938,7 @@ describe("Test: ElasticSearch service", () => { }); it("should not inject meta if specified", () => { - const promise = elasticsearch.createOrReplace( + const promise = elasticsearch.client.createOrReplace( index, collection, "liia", @@ -929,7 +947,7 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, document: { city: "Kathmandu", @@ -948,16 +966,21 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.index fails", () => { - elasticsearch._client.index.rejects(esClientError); + elasticsearch.client._client.index.rejects(esClientError); - const promise = elasticsearch.createOrReplace(index, collection, "liia", { - city: "Kathmandu", - }); + const promise = elasticsearch.client.createOrReplace( + index, + collection, + "liia", + { + city: "Kathmandu", + }, + ); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -966,7 +989,7 @@ describe("Test: ElasticSearch service", () => { describe("#update", () => { beforeEach(() => { - elasticsearch._client.update.resolves({ + elasticsearch.client._client.update.resolves({ _id: "liia", _version: 1, get: { @@ -976,12 +999,12 @@ describe("Test: ElasticSearch service", () => { }); it("should allow to update a document", () => { - const promise = elasticsearch.update(index, collection, "liia", { + const promise = elasticsearch.client.update(index, collection, "liia", { city: "Panipokari", }); return promise.then((result) => { - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1007,7 +1030,7 @@ describe("Test: ElasticSearch service", () => { }); it("should handle optional configurations", () => { - const promise = elasticsearch.update( + const promise = elasticsearch.client.update( index, collection, 
"liia", @@ -1016,7 +1039,7 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1042,23 +1065,23 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.update fails", () => { - elasticsearch._client.update.rejects(esClientError); + elasticsearch.client._client.update.rejects(esClientError); - const promise = elasticsearch.update(index, collection, "liia", { + const promise = elasticsearch.client.update(index, collection, "liia", { city: "Kathmandu", }); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); }); it("should default an explicitly null retryOnConflict", async () => { - await elasticsearch.update( + await elasticsearch.client.update( index, collection, "liia", @@ -1066,7 +1089,7 @@ describe("Test: ElasticSearch service", () => { { refresh: "wait_for", userId: "oh noes", retryOnConflict: null }, ); - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1086,7 +1109,7 @@ describe("Test: ElasticSearch service", () => { describe("#upsert", () => { beforeEach(() => { - elasticsearch._client.update.resolves({ + elasticsearch.client._client.update.resolves({ _id: "liia", _version: 2, result: "updated", @@ -1097,11 +1120,16 @@ describe("Test: ElasticSearch service", () => { }); it("should allow to upsert a document", async () => { - const result = await elasticsearch.upsert(index, collection, "liia", { - city: "Panipokari", - }); + const result = await elasticsearch.client.upsert( + index, + collection, + "liia", + { + city: "Panipokari", + }, + ); - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1133,7 +1161,7 @@ describe("Test: ElasticSearch service", () => { }); it("should handle default values for upserted documents", async () => { - const result = await elasticsearch.upsert( + const result = await elasticsearch.client.upsert( index, collection, "liia", @@ -1143,7 +1171,7 @@ describe("Test: ElasticSearch service", () => { }, ); - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1176,7 +1204,7 @@ describe("Test: ElasticSearch service", () => { }); it('should return the right "_created" result on a document creation', async () => { - elasticsearch._client.update.resolves({ + elasticsearch.client._client.update.resolves({ _id: "liia", _version: 1, result: "created", @@ -1185,7 +1213,7 @@ describe("Test: ElasticSearch service", () => { }, }); - const result = await elasticsearch.upsert( + const result = await elasticsearch.client.upsert( index, collection, "liia", @@ -1195,7 +1223,7 @@ describe("Test: ElasticSearch service", () => { }, ); - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1228,7 +1256,7 @@ describe("Test: ElasticSearch service", () => { }); it("should handle optional configurations", async () => 
{ - const result = await elasticsearch.upsert( + const result = await elasticsearch.client.upsert( index, collection, "liia", @@ -1236,7 +1264,7 @@ describe("Test: ElasticSearch service", () => { { refresh: "wait_for", userId: "aschen", retryOnConflict: 42 }, ); - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1268,19 +1296,21 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.upsert fails", async () => { - elasticsearch._client.update.rejects(esClientError); + elasticsearch.client._client.update.rejects(esClientError); await should( - elasticsearch.upsert(index, collection, "liia", { + elasticsearch.client.upsert(index, collection, "liia", { city: "Kathmandu", }), ).rejected(); - should(elasticsearch._esWrapper.formatESError).calledWith(esClientError); + should(elasticsearch.client._esWrapper.formatESError).calledWith( + esClientError, + ); }); it("should default an explicitly null retryOnConflict", async () => { - await elasticsearch.upsert( + await elasticsearch.client.upsert( index, collection, "liia", @@ -1288,7 +1318,7 @@ describe("Test: ElasticSearch service", () => { { refresh: "wait_for", userId: "oh noes", retryOnConflict: null }, ); - should(elasticsearch._client.update).be.calledWithMatch({ + should(elasticsearch.client._client.update).be.calledWithMatch({ index: alias, doc: { city: "Panipokari", @@ -1314,21 +1344,21 @@ describe("Test: ElasticSearch service", () => { describe("#replace", () => { beforeEach(() => { - elasticsearch._client.index.resolves({ + elasticsearch.client._client.index.resolves({ _id: "liia", _version: 1, _source: { city: "Kathmandu" }, }); - elasticsearch._client.exists.resolves(true); + elasticsearch.client._client.exists.resolves(true); }); it("should support replace capability", () => { - const promise = elasticsearch.replace(index, collection, "liia", { + const promise = elasticsearch.client.replace(index, collection, "liia", { city: "Kathmandu", }); return promise.then((result) => { - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, id: "liia", document: { @@ -1352,7 +1382,7 @@ describe("Test: ElasticSearch service", () => { }); it("should accept additional options", () => { - const promise = elasticsearch.replace( + const promise = elasticsearch.client.replace( index, collection, "liia", @@ -1361,7 +1391,7 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, id: "liia", document: { @@ -1385,33 +1415,35 @@ describe("Test: ElasticSearch service", () => { }); it("should throw a NotFoundError Exception if document already exists", () => { - elasticsearch._client.exists.resolves(false); + elasticsearch.client._client.exists.resolves(false); - const promise = elasticsearch.replace(index, collection, "liia", { + const promise = elasticsearch.client.replace(index, collection, "liia", { city: "Kathmandu", }); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWithMatch({ + should( + elasticsearch.client._esWrapper.formatESError, + ).be.calledWithMatch({ id: "services.storage.not_found", }); - should(elasticsearch._client.index).not.be.called(); + 
should(elasticsearch.client._client.index).not.be.called(); }); }); it("should return a rejected promise if client.index fails", () => { - elasticsearch._client.index.rejects(esClientError); + elasticsearch.client._client.index.rejects(esClientError); - const promise = elasticsearch.replace(index, collection, "liia", { + const promise = elasticsearch.client.replace(index, collection, "liia", { city: "Kathmandu", }); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -1420,7 +1452,7 @@ describe("Test: ElasticSearch service", () => { describe("#delete", () => { beforeEach(() => { - elasticsearch._client.delete.resolves({ + elasticsearch.client._client.delete.resolves({ body: { _id: "liia", }, @@ -1428,10 +1460,10 @@ describe("Test: ElasticSearch service", () => { }); it("should allow to delete a document", () => { - const promise = elasticsearch.delete(index, collection, "liia"); + const promise = elasticsearch.client.delete(index, collection, "liia"); return promise.then((result) => { - should(elasticsearch._client.delete).be.calledWithMatch({ + should(elasticsearch.client._client.delete).be.calledWithMatch({ index: alias, id: "liia", refresh: undefined, @@ -1442,12 +1474,12 @@ describe("Test: ElasticSearch service", () => { }); it("should allow additional options", () => { - const promise = elasticsearch.delete(index, collection, "liia", { + const promise = elasticsearch.client.delete(index, collection, "liia", { refresh: "wait_for", }); return promise.then((result) => { - should(elasticsearch._client.delete).be.calledWithMatch({ + should(elasticsearch.client._client.delete).be.calledWithMatch({ index: alias, id: "liia", refresh: "wait_for", @@ -1458,14 +1490,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.delete fails", () => { - elasticsearch._client.delete.rejects(esClientError); + elasticsearch.client._client.delete.rejects(esClientError); - const promise = elasticsearch.delete(index, collection, "liia"); + const promise = elasticsearch.client.delete(index, collection, "liia"); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -1474,12 +1506,12 @@ describe("Test: ElasticSearch service", () => { describe("#updateByQuery", () => { beforeEach(() => { - sinon.stub(elasticsearch, "_getAllDocumentsFromQuery").resolves([ + sinon.stub(elasticsearch.client, "_getAllDocumentsFromQuery").resolves([ { _id: "_id1", _source: { name: "Ok" } }, { _id: "_id2", _source: { name: "Ok" } }, ]); - sinon.stub(elasticsearch, "mUpdate").resolves({ + sinon.stub(elasticsearch.client, "mUpdate").resolves({ items: [ { _id: "_id1", @@ -1495,7 +1527,7 @@ describe("Test: ElasticSearch service", () => { errors: [], }); - elasticsearch._client.indices.refresh.resolves({ + elasticsearch.client._client.indices.refresh.resolves({ _shards: 1, }); }); @@ -1518,7 +1550,7 @@ describe("Test: ElasticSearch service", () => { ]; it("should have updateByQuery capability", () => { - const promise = elasticsearch.updateByQuery( + const promise = elasticsearch.client.updateByQuery( index, collection, { filter: { term: { name: "Ok" } } }, @@ -1526,7 +1558,7 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - 
should(elasticsearch.mUpdate).be.calledWithMatch( + should(elasticsearch.client.mUpdate).be.calledWithMatch( index, collection, documents, @@ -1552,7 +1584,7 @@ describe("Test: ElasticSearch service", () => { }); it("should allow additional options", async () => { - const result = await elasticsearch.updateByQuery( + const result = await elasticsearch.client.updateByQuery( index, collection, { filter: "term" }, @@ -1560,14 +1592,16 @@ describe("Test: ElasticSearch service", () => { { refresh: "wait_for", size: 3, userId: "aschen" }, ); - should(elasticsearch._getAllDocumentsFromQuery).be.calledWithMatch({ - index: alias, - query: { filter: "term" }, - scroll: "5s", - size: 3, - }); + should(elasticsearch.client._getAllDocumentsFromQuery).be.calledWithMatch( + { + index: alias, + query: { filter: "term" }, + scroll: "5s", + size: 3, + }, + ); - should(elasticsearch.mUpdate).be.calledWithMatch( + should(elasticsearch.client.mUpdate).be.calledWithMatch( index, collection, documents, @@ -1587,9 +1621,9 @@ describe("Test: ElasticSearch service", () => { }); it("should reject if the number of impacted documents exceeds the configured limit", () => { - elasticsearch._getAllDocumentsFromQuery.restore(); + elasticsearch.client._getAllDocumentsFromQuery.restore(); - elasticsearch._client.search.resolves({ + elasticsearch.client._client.search.resolves({ hits: { hits: [], total: { @@ -1602,7 +1636,7 @@ describe("Test: ElasticSearch service", () => { kuzzle.config.limits.documentsFetchCount = 2; return should( - elasticsearch.updateByQuery(index, collection, {}, {}), + elasticsearch.client.updateByQuery(index, collection, {}, {}), ).rejectedWith(SizeLimitError, { id: "services.storage.write_limit_exceeded", }); @@ -1632,7 +1666,7 @@ describe("Test: ElasticSearch service", () => { refresh: false, }; - elasticsearch._client.updateByQuery.resolves({ + elasticsearch.client._client.updateByQuery.resolves({ total: 42, updated: 42, failures: [], @@ -1640,14 +1674,16 @@ describe("Test: ElasticSearch service", () => { }); it("should have updateByQuery capabilities", async () => { - const result = await elasticsearch.bulkUpdateByQuery( + const result = await elasticsearch.client.bulkUpdateByQuery( index, collection, query, changes, ); - should(elasticsearch._client.updateByQuery).be.calledWithMatch(request); + should(elasticsearch.client._client.updateByQuery).be.calledWithMatch( + request, + ); should(result).match({ updated: 42, @@ -1657,17 +1693,25 @@ describe("Test: ElasticSearch service", () => { it("should allow additonnal option", async () => { request.refresh = "wait_for"; - await elasticsearch.bulkUpdateByQuery(index, collection, query, changes, { - refresh: "wait_for", - }); + await elasticsearch.client.bulkUpdateByQuery( + index, + collection, + query, + changes, + { + refresh: "wait_for", + }, + ); - should(elasticsearch._client.updateByQuery).be.calledWithMatch(request); + should(elasticsearch.client._client.updateByQuery).be.calledWithMatch( + request, + ); }); it("should reject if client.updateByQuery fails", () => { - elasticsearch._client.updateByQuery.rejects(esClientError); + elasticsearch.client._client.updateByQuery.rejects(esClientError); - const promise = elasticsearch.bulkUpdateByQuery( + const promise = elasticsearch.client.bulkUpdateByQuery( index, collection, query, @@ -1677,20 +1721,20 @@ describe("Test: ElasticSearch service", () => { return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + 
should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); }); it("should reject if some failures occur", () => { - elasticsearch._client.updateByQuery.resolves({ + elasticsearch.client._client.updateByQuery.resolves({ total: 3, updated: 2, failures: [{ shardId: 42, reason: "error", foo: "bar" }], }); - const promise = elasticsearch.bulkUpdateByQuery( + const promise = elasticsearch.client.bulkUpdateByQuery( index, collection, query, @@ -1707,12 +1751,12 @@ describe("Test: ElasticSearch service", () => { describe("#deleteByQuery", () => { beforeEach(() => { - sinon.stub(elasticsearch, "_getAllDocumentsFromQuery").resolves([ + sinon.stub(elasticsearch.client, "_getAllDocumentsFromQuery").resolves([ { _id: "_id1", _source: "_source1" }, { _id: "_id2", _source: "_source2" }, ]); - elasticsearch._client.deleteByQuery.resolves({ + elasticsearch.client._client.deleteByQuery.resolves({ total: 2, deleted: 1, failures: [ @@ -1728,11 +1772,15 @@ describe("Test: ElasticSearch service", () => { }); it("should have deleteByQuery capability", async () => { - const result = await elasticsearch.deleteByQuery(index, collection, { - filter: "term", - }); + const result = await elasticsearch.client.deleteByQuery( + index, + collection, + { + filter: "term", + }, + ); - should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ index: alias, query: { filter: "term" }, scroll: "5s", @@ -1741,14 +1789,16 @@ describe("Test: ElasticSearch service", () => { refresh: undefined, }); - should(elasticsearch._getAllDocumentsFromQuery).be.calledWithMatch({ - index: alias, - query: { filter: "term" }, - scroll: "5s", - from: undefined, - size: 1000, - refresh: undefined, - }); + should(elasticsearch.client._getAllDocumentsFromQuery).be.calledWithMatch( + { + index: alias, + query: { filter: "term" }, + scroll: "5s", + from: undefined, + size: 1000, + refresh: undefined, + }, + ); should(result).match({ documents: [ @@ -1762,14 +1812,14 @@ describe("Test: ElasticSearch service", () => { }); it("should allow additional options", async () => { - const result = await elasticsearch.deleteByQuery( + const result = await elasticsearch.client.deleteByQuery( index, collection, { filter: "term" }, { refresh: "wait_for", from: 1, size: 3 }, ); - should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ index: alias, query: { filter: "term" }, max_docs: 3, @@ -1784,14 +1834,14 @@ describe("Test: ElasticSearch service", () => { }); it("should not fetch documents if fetch=false", async () => { - const result = await elasticsearch.deleteByQuery( + const result = await elasticsearch.client.deleteByQuery( index, collection, { filter: "term" }, { fetch: false }, ); - should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ index: alias, query: { filter: "term" }, scroll: "5s", @@ -1800,7 +1850,7 @@ describe("Test: ElasticSearch service", () => { refresh: undefined, }); - should(elasticsearch._getAllDocumentsFromQuery).not.be.called(); + should(elasticsearch.client._getAllDocumentsFromQuery).not.be.called(); should(result).match({ documents: [], @@ -1811,23 +1861,23 @@ describe("Test: ElasticSearch service", () => { }); it("should rejects if client.deleteByQuery fails", () => { - elasticsearch._client.deleteByQuery.rejects(esClientError); + 
elasticsearch.client._client.deleteByQuery.rejects(esClientError); - const promise = elasticsearch.deleteByQuery(index, collection, { + const promise = elasticsearch.client.deleteByQuery(index, collection, { filter: "term", }); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); }); it("should reject if the query is empty", () => { - const promise = elasticsearch.deleteByQuery( + const promise = elasticsearch.client.deleteByQuery( index, collection, "not an object", @@ -1839,9 +1889,9 @@ describe("Test: ElasticSearch service", () => { }); it("should reject if the number of impacted documents exceeds the configured limit", () => { - elasticsearch._getAllDocumentsFromQuery.restore(); + elasticsearch.client._getAllDocumentsFromQuery.restore(); - elasticsearch._client.search.resolves({ + elasticsearch.client._client.search.resolves({ hits: { hits: [], total: { @@ -1854,7 +1904,7 @@ describe("Test: ElasticSearch service", () => { kuzzle.config.limits.documentsFetchCount = 2; return should( - elasticsearch.deleteByQuery(index, collection, {}), + elasticsearch.client.deleteByQuery(index, collection, {}), ).rejectedWith(SizeLimitError, { id: "services.storage.write_limit_exceeded", }); @@ -1863,30 +1913,33 @@ describe("Test: ElasticSearch service", () => { describe("#deleteFields", () => { beforeEach(() => { - elasticsearch._client.get.resolves({ + elasticsearch.client._client.get.resolves({ _id: "liia", _version: 1, _source: { city: "Kathmandu", useless: "somevalue" }, }); - elasticsearch._client.index.resolves({ + elasticsearch.client._client.index.resolves({ _id: "liia", _version: 2, }); }); it("should support field removal capability", () => { - const promise = elasticsearch.deleteFields(index, collection, "liia", [ - "useless", - ]); + const promise = elasticsearch.client.deleteFields( + index, + collection, + "liia", + ["useless"], + ); return promise.then((result) => { - should(elasticsearch._client.get).be.calledWithMatch({ + should(elasticsearch.client._client.get).be.calledWithMatch({ index: alias, id: "liia", }); - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, id: "liia", document: { @@ -1908,7 +1961,7 @@ describe("Test: ElasticSearch service", () => { }); it("should accept additional options", () => { - const promise = elasticsearch.deleteFields( + const promise = elasticsearch.client.deleteFields( index, collection, "liia", @@ -1917,12 +1970,12 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - should(elasticsearch._client.get).be.calledWithMatch({ + should(elasticsearch.client._client.get).be.calledWithMatch({ index: alias, id: "liia", }); - should(elasticsearch._client.index).be.calledWithMatch({ + should(elasticsearch.client._client.index).be.calledWithMatch({ index: alias, id: "liia", document: { @@ -1944,33 +1997,39 @@ describe("Test: ElasticSearch service", () => { }); it("should throw a NotFoundError Exception if document does not exists", () => { - elasticsearch._client.get.rejects(esClientError); + elasticsearch.client._client.get.rejects(esClientError); - const promise = elasticsearch.deleteFields(index, collection, "liia", [ - "useless", - ]); + const promise = elasticsearch.client.deleteFields( + index, + collection, + "liia", + ["useless"], + ); return should(promise) .be.rejected() 
.then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); - should(elasticsearch._client.index).not.be.called(); + should(elasticsearch.client._client.index).not.be.called(); }); }); it("should return a rejected promise if client.index fails", () => { - elasticsearch._client.index.rejects(esClientError); + elasticsearch.client._client.index.rejects(esClientError); - const promise = elasticsearch.deleteFields(index, collection, "liia", [ - "useless", - ]); + const promise = elasticsearch.client.deleteFields( + index, + collection, + "liia", + ["useless"], + ); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -1998,17 +2057,17 @@ describe("Test: ElasticSearch service", () => { .onCall(1) .resolves(2); - elasticsearch._client.search.resolves({ + elasticsearch.client._client.search.resolves({ hits: hits1, _scroll_id: "scroll-id", }); - elasticsearch._client.scroll.resolves({ + elasticsearch.client._client.scroll.resolves({ hits: hits2, _scroll_id: "scroll-id", }); - const result = await elasticsearch.mExecute( + const result = await elasticsearch.client.mExecute( index, collection, { match: 21 }, @@ -2017,7 +2076,7 @@ describe("Test: ElasticSearch service", () => { should(result).match([1, 2]); - should(elasticsearch._client.search.firstCall.args[0]).match({ + should(elasticsearch.client._client.search.firstCall.args[0]).match({ index: alias, query: { match: 21 }, scroll: "5s", @@ -2031,7 +2090,7 @@ describe("Test: ElasticSearch service", () => { }); it("should reject if the query is empty", () => { - const promise = elasticsearch.mExecute( + const promise = elasticsearch.client.mExecute( index, collection, "not an object", @@ -2046,52 +2105,52 @@ describe("Test: ElasticSearch service", () => { describe("#createIndex", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { alias: alias }, { alias: "@%nepali.liia" }, ]); - sinon.stub(elasticsearch, "_createHiddenCollection").resolves(); + sinon.stub(elasticsearch.client, "_createHiddenCollection").resolves(); }); afterEach(() => { - elasticsearch._createHiddenCollection.restore(); + elasticsearch.client._createHiddenCollection.restore(); }); it("should resolve and create a hidden collection if the index does not exist", async () => { - await elasticsearch.createIndex("lfiduras"); + await elasticsearch.client.createIndex("lfiduras"); - should(elasticsearch._createHiddenCollection).be.calledWithMatch( + should(elasticsearch.client._createHiddenCollection).be.calledWithMatch( "lfiduras", ); }); it("should reject if the index already exists", () => { - return should(elasticsearch.createIndex("nepali")).be.rejectedWith( + return should(elasticsearch.client.createIndex("nepali")).be.rejectedWith( PreconditionError, { id: "services.storage.index_already_exists" }, ); }); it("should return a rejected promise if client.cat.indices fails", () => { - elasticsearch._client.cat.aliases.rejects(esClientError); + elasticsearch.client._client.cat.aliases.rejects(esClientError); - const promise = elasticsearch.createIndex(index, collection, { + const promise = elasticsearch.client.createIndex(index, collection, { filter: "term", }); return should(promise) .be.rejected() .then(() => { - 
should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); }); it("should reject if the index name is invalid", () => { - sinon.stub(elasticsearch, "isIndexNameValid").returns(false); + sinon.stub(elasticsearch.client, "isIndexNameValid").returns(false); - return should(elasticsearch.createIndex("foobar")).rejectedWith( + return should(elasticsearch.client.createIndex("foobar")).rejectedWith( BadRequestError, { id: "services.storage.invalid_index_name" }, ); @@ -2102,37 +2161,44 @@ describe("Test: ElasticSearch service", () => { let _checkMappings; beforeEach(() => { - _checkMappings = elasticsearch._checkMappings; + _checkMappings = elasticsearch.client._checkMappings; - elasticsearch._client.indices.create.resolves({}); - elasticsearch.hasCollection = sinon.stub().resolves(false); - elasticsearch._checkMappings = sinon.stub().resolves(); + elasticsearch.client._client.indices.create.resolves({}); + elasticsearch.client.hasCollection = sinon.stub().resolves(false); + elasticsearch.client._checkMappings = sinon.stub().resolves(); - sinon.stub(elasticsearch, "_createHiddenCollection").resolves(); - sinon.stub(elasticsearch, "_hasHiddenCollection").resolves(false); - sinon.stub(elasticsearch, "deleteCollection").resolves(); - sinon.stub(elasticsearch, "_getAvailableIndice").resolves(indice); - sinon.stub(elasticsearch, "_getWaitForActiveShards").returns(1); + sinon.stub(elasticsearch.client, "_createHiddenCollection").resolves(); + sinon.stub(elasticsearch.client, "_hasHiddenCollection").resolves(false); + sinon.stub(elasticsearch.client, "deleteCollection").resolves(); + sinon.stub(elasticsearch.client, "_getAvailableIndice").resolves(indice); + sinon.stub(elasticsearch.client, "_getWaitForActiveShards").returns(1); }); afterEach(() => { - elasticsearch._getAvailableIndice.restore(); + elasticsearch.client._getAvailableIndice.restore(); }); it("should allow creating a new collection and inject commonMappings", async () => { const settings = { index: { blocks: { write: true } } }; const mappings = { properties: { city: { type: "keyword" } } }; - const result = await elasticsearch.createCollection(index, collection, { - mappings, - settings, - }); + const result = await elasticsearch.client.createCollection( + index, + collection, + { + mappings, + settings, + }, + ); - should(elasticsearch.hasCollection).be.calledWith(index, collection); - should(elasticsearch._checkMappings).be.calledWithMatch({ + should(elasticsearch.client.hasCollection).be.calledWith( + index, + collection, + ); + should(elasticsearch.client._checkMappings).be.calledWithMatch({ properties: mappings.properties, }); - should(elasticsearch._client.indices.create).be.calledWithMatch({ + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ index: indice, aliases: { [alias]: {} }, mappings: { @@ -2144,18 +2210,18 @@ describe("Test: ElasticSearch service", () => { }); should(result).be.null(); - should(elasticsearch.deleteCollection).not.be.called(); + should(elasticsearch.client.deleteCollection).not.be.called(); }); it("should delete the hidden collection if present", async () => { - elasticsearch._hasHiddenCollection.resolves(true); + elasticsearch.client._hasHiddenCollection.resolves(true); - await elasticsearch.createCollection(index, collection, {}); + await elasticsearch.client.createCollection(index, collection, {}); should(Mutex.prototype.lock).be.called(); should(Mutex.prototype.unlock).be.called(); - 
should(elasticsearch._hasHiddenCollection).be.calledWith(index); - should(elasticsearch.deleteCollection).be.calledWith( + should(elasticsearch.client._hasHiddenCollection).be.calledWith(index); + should(elasticsearch.client.deleteCollection).be.calledWith( index, "_kuzzle_keep", ); @@ -2164,11 +2230,15 @@ describe("Test: ElasticSearch service", () => { it("should allow to set dynamic and _meta fields", async () => { const mappings = { dynamic: "true", _meta: { some: "meta" } }; - const result = await elasticsearch.createCollection(index, collection, { - mappings, - }); + const result = await elasticsearch.client.createCollection( + index, + collection, + { + mappings, + }, + ); - should(elasticsearch._client.indices.create).be.calledWithMatch({ + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ index: indice, aliases: { [alias]: {} }, mappings: { @@ -2182,16 +2252,16 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.indices.create fails", () => { - elasticsearch._client.indices.create.rejects(esClientError); + elasticsearch.client._client.indices.create.rejects(esClientError); - const promise = elasticsearch.createCollection(index, collection, { + const promise = elasticsearch.client.createCollection(index, collection, { mappings: { properties: { city: { type: "keyword" } } }, }); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -2208,21 +2278,21 @@ describe("Test: ElasticSearch service", () => { }, }; - elasticsearch._client.indices.create.rejects(esReject); + elasticsearch.client._client.indices.create.rejects(esReject); - const promise = elasticsearch.createCollection(index, collection, { + const promise = elasticsearch.client.createCollection(index, collection, { mappings: { properties: { city: { type: "keyword" } } }, }); return should(promise) .be.fulfilled() .then(() => { - should(elasticsearch._esWrapper.formatESError).not.be.called(); + should(elasticsearch.client._esWrapper.formatESError).not.be.called(); }); }); it("should reject with BadRequestError on wrong mapping", async () => { - elasticsearch._checkMappings = _checkMappings; + elasticsearch.client._checkMappings = _checkMappings; const mappings = { dinamic: "false", @@ -2233,7 +2303,7 @@ describe("Test: ElasticSearch service", () => { global.NODE_ENV = "development"; await should( - elasticsearch.createCollection(index, collection, { mappings }), + elasticsearch.client.createCollection(index, collection, { mappings }), ).be.rejectedWith({ message: 'Invalid mapping property "mappings.dinamic". 
Did you mean "dynamic"?', @@ -2242,7 +2312,7 @@ describe("Test: ElasticSearch service", () => { global.NODE_ENV = "production"; await should( - elasticsearch.createCollection(index, collection, { mappings }), + elasticsearch.client.createCollection(index, collection, { mappings }), ).be.rejectedWith({ message: 'Invalid mapping property "mappings.dinamic".', id: "services.storage.invalid_mapping", @@ -2268,16 +2338,16 @@ describe("Test: ElasticSearch service", () => { dynamic: true, }; - await elasticsearch.createCollection(index, collection, { + await elasticsearch.client.createCollection(index, collection, { mappings: mappings3, }); - should(elasticsearch._checkMappings).be.calledWithMatch({ + should(elasticsearch.client._checkMappings).be.calledWithMatch({ dynamic: "true", }); await should( - elasticsearch.createCollection(index, collection, { + elasticsearch.client.createCollection(index, collection, { mappings: mappings1, }), ).be.rejectedWith({ @@ -2286,7 +2356,7 @@ describe("Test: ElasticSearch service", () => { }); await should( - elasticsearch.createCollection(index, collection, { + elasticsearch.client.createCollection(index, collection, { mappings: mappings2, }), ).be.rejectedWith({ @@ -2298,16 +2368,19 @@ describe("Test: ElasticSearch service", () => { it("should call updateCollection if the collection already exists", async () => { const settings = { index: { blocks: { write: true } } }; const mappings = { properties: { city: { type: "keyword" } } }; - elasticsearch.hasCollection.resolves(true); - sinon.stub(elasticsearch, "updateCollection").resolves({}); + elasticsearch.client.hasCollection.resolves(true); + sinon.stub(elasticsearch.client, "updateCollection").resolves({}); - await elasticsearch.createCollection(index, collection, { + await elasticsearch.client.createCollection(index, collection, { mappings, settings, }); - should(elasticsearch.hasCollection).be.calledWith(index, collection); - should(elasticsearch.updateCollection).be.calledWithMatch( + should(elasticsearch.client.hasCollection).be.calledWith( + index, + collection, + ); + should(elasticsearch.client.updateCollection).be.calledWithMatch( index, collection, { @@ -2344,9 +2417,12 @@ describe("Test: ElasticSearch service", () => { }, }; - await elasticsearch.createCollection(index, collection, { mappings }); + await elasticsearch.client.createCollection(index, collection, { + mappings, + }); - const esReq = elasticsearch._client.indices.create.firstCall.args[0], + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0], expectedMapping = { _meta: undefined, dynamic: "false", @@ -2368,21 +2444,23 @@ describe("Test: ElasticSearch service", () => { }); it("should reject if the index name is invalid", () => { - sinon.stub(elasticsearch, "isIndexNameValid").returns(false); + sinon.stub(elasticsearch.client, "isIndexNameValid").returns(false); - return should(elasticsearch.createCollection("foo", "bar")).rejectedWith( - BadRequestError, - { id: "services.storage.invalid_index_name" }, - ); + return should( + elasticsearch.client.createCollection("foo", "bar"), + ).rejectedWith(BadRequestError, { + id: "services.storage.invalid_index_name", + }); }); it("should reject if the collection name is invalid", () => { - sinon.stub(elasticsearch, "isCollectionNameValid").returns(false); + sinon.stub(elasticsearch.client, "isCollectionNameValid").returns(false); - return should(elasticsearch.createCollection("foo", "bar")).rejectedWith( - BadRequestError, - { id: "services.storage.invalid_collection_name" 
}, - ); + return should( + elasticsearch.client.createCollection("foo", "bar"), + ).rejectedWith(BadRequestError, { + id: "services.storage.invalid_collection_name", + }); }); it("should use defaultSettings if none are provided", async () => { @@ -2391,9 +2469,10 @@ describe("Test: ElasticSearch service", () => { number_of_shards: 66, }; - await elasticsearch.createCollection(index, collection); + await elasticsearch.client.createCollection(index, collection); - const esReq = elasticsearch._client.indices.create.firstCall.args[0]; + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; should(esReq.settings).eql(elasticsearch.config.defaultSettings); }); @@ -2408,9 +2487,12 @@ describe("Test: ElasticSearch service", () => { number_of_shards: 2, }; - await elasticsearch.createCollection(index, collection, { settings }); + await elasticsearch.client.createCollection(index, collection, { + settings, + }); - const esReq = elasticsearch._client.indices.create.firstCall.args[0]; + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; should(esReq.settings).eql(settings); }); @@ -2424,9 +2506,12 @@ describe("Test: ElasticSearch service", () => { number_of_replicas: 1, }; - await elasticsearch.createCollection(index, collection, { settings }); + await elasticsearch.client.createCollection(index, collection, { + settings, + }); - const esReq = elasticsearch._client.indices.create.firstCall.args[0]; + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; should(esReq.settings).eql({ number_of_replicas: 1, @@ -2435,19 +2520,23 @@ describe("Test: ElasticSearch service", () => { }); it("should wait for all shards to being active when using an Elasticsearch cluster", async () => { - elasticsearch._getWaitForActiveShards = sinon.stub().returns("all"); - await elasticsearch.createCollection(index, collection); + elasticsearch.client._getWaitForActiveShards = sinon + .stub() + .returns("all"); + await elasticsearch.client.createCollection(index, collection); - const esReq = elasticsearch._client.indices.create.firstCall.args[0]; + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; should(esReq.wait_for_active_shards).eql("all"); }); it("should only wait for one shard to being active when using a single node", async () => { - elasticsearch._getWaitForActiveShards = sinon.stub().returns("1"); - await elasticsearch.createCollection(index, collection); + elasticsearch.client._getWaitForActiveShards = sinon.stub().returns("1"); + await elasticsearch.client.createCollection(index, collection); - const esReq = elasticsearch._client.indices.create.firstCall.args[0]; + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; should(esReq.wait_for_active_shards).eql("1"); }); @@ -2455,7 +2544,7 @@ describe("Test: ElasticSearch service", () => { describe("#getMapping", () => { beforeEach(() => { - elasticsearch._client.indices.getMapping.resolves({ + elasticsearch.client._client.indices.getMapping.resolves({ [indice]: { mappings: { dynamic: true, @@ -2468,21 +2557,23 @@ describe("Test: ElasticSearch service", () => { }, }); - elasticsearch._esWrapper.getMapping = sinon + elasticsearch.client._esWrapper.getMapping = sinon .stub() .resolves({ foo: "bar" }); - sinon.stub(elasticsearch, "_getIndice").resolves(indice); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); }); afterEach(() => { - elasticsearch._getIndice.restore(); + elasticsearch.client._getIndice.restore(); }); 
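// For context on the shape these stubs mimic: with the 8.x client,
// `indices.getMapping()` resolves to the response body directly, keyed by the
// concrete indice, with no `{ body }` wrapper. A minimal sketch of reading a
// collection mapping that way, assuming a hypothetical local node URL and
// indice name:
const { Client } = require("@elastic/elasticsearch");

const exampleClient = new Client({ node: "http://localhost:9200" }); // assumed node URL
const exampleIndice = "&nepali.mehry"; // hypothetical concrete indice

async function readCollectionMapping() {
  // ES 8.x: the promise resolves to the body itself, so the mapping is read
  // from response[indice].mappings rather than response.body[indice].mappings.
  const response = await exampleClient.indices.getMapping({ index: exampleIndice });
  const { dynamic, _meta, properties } = response[exampleIndice].mappings;
  return { dynamic, _meta, properties };
}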
it("should have getMapping capabilities", () => { - const promise = elasticsearch.getMapping(index, collection); + const promise = elasticsearch.client.getMapping(index, collection); return promise.then((result) => { - should(elasticsearch._client.indices.getMapping).be.calledWithMatch({ + should( + elasticsearch.client._client.indices.getMapping, + ).be.calledWithMatch({ index: indice, }); @@ -2497,12 +2588,14 @@ describe("Test: ElasticSearch service", () => { }); it("should include kuzzleMeta if specified", () => { - const promise = elasticsearch.getMapping(index, collection, { + const promise = elasticsearch.client.getMapping(index, collection, { includeKuzzleMeta: true, }); return promise.then((result) => { - should(elasticsearch._client.indices.getMapping).be.calledWithMatch({ + should( + elasticsearch.client._client.indices.getMapping, + ).be.calledWithMatch({ index: indice, }); @@ -2518,14 +2611,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.cat.indices fails", () => { - elasticsearch._client.indices.getMapping.rejects(esClientError); + elasticsearch.client._client.indices.getMapping.rejects(esClientError); - const promise = elasticsearch.getMapping(index, collection); + const promise = elasticsearch.client.getMapping(index, collection); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -2552,33 +2645,33 @@ describe("Test: ElasticSearch service", () => { settings = { index: { blocks: { write: true } } }; mappings = { properties: { city: { type: "keyword" } } }; - elasticsearch._client.indices.getSettings.resolves(oldSettings); - elasticsearch.updateMapping = sinon.stub().resolves(); - elasticsearch.updateSettings = sinon.stub().resolves(); - elasticsearch.updateSearchIndex = sinon.stub().resolves(); - sinon.stub(elasticsearch, "_getIndice").resolves(indice); + elasticsearch.client._client.indices.getSettings.resolves(oldSettings); + elasticsearch.client.updateMapping = sinon.stub().resolves(); + elasticsearch.client.updateSettings = sinon.stub().resolves(); + elasticsearch.client.updateSearchIndex = sinon.stub().resolves(); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); }); afterEach(() => { - elasticsearch._getIndice.restore(); + elasticsearch.client._getIndice.restore(); }); it("should call updateSettings, updateMapping", async () => { - elasticsearch.getMapping = sinon.stub().resolves({ + elasticsearch.client.getMapping = sinon.stub().resolves({ dynamic: "true", properties: { city: { type: "keyword" }, dynamic: "false" }, }); - await elasticsearch.updateCollection(index, collection, { + await elasticsearch.client.updateCollection(index, collection, { mappings, settings, }); - should(elasticsearch.updateSettings).be.calledWith( + should(elasticsearch.client.updateSettings).be.calledWith( index, collection, settings, ); - should(elasticsearch.updateMapping).be.calledWith( + should(elasticsearch.client.updateMapping).be.calledWith( index, collection, mappings, @@ -2586,36 +2679,36 @@ describe("Test: ElasticSearch service", () => { }); it("should call updateSettings and updateMapping", async () => { - elasticsearch.getMapping = sinon.stub().resolves({ + elasticsearch.client.getMapping = sinon.stub().resolves({ dynamic: "false", properties: { city: { type: "keyword" } }, }); - await elasticsearch.updateCollection(index, collection, { + await 
elasticsearch.client.updateCollection(index, collection, { mappings, settings, }); - should(elasticsearch.updateSettings).be.calledWith( + should(elasticsearch.client.updateSettings).be.calledWith( index, collection, settings, ); - should(elasticsearch.updateMapping).be.calledWith( + should(elasticsearch.client.updateMapping).be.calledWith( index, collection, mappings, ); - should(elasticsearch.updateSearchIndex).not.be.called(); + should(elasticsearch.client.updateSearchIndex).not.be.called(); }); it("should revert settings if updateMapping fail", () => { - elasticsearch.getMapping = sinon.stub().resolves({ + elasticsearch.client.getMapping = sinon.stub().resolves({ dynamic: "true", properties: { city: { type: "keyword" } }, }); - elasticsearch.updateMapping.rejects(); + elasticsearch.client.updateMapping.rejects(); - const promise = elasticsearch.updateCollection(index, collection, { + const promise = elasticsearch.client.updateCollection(index, collection, { mappings, settings, }); @@ -2623,12 +2716,14 @@ describe("Test: ElasticSearch service", () => { return should(promise) .be.rejected() .then(() => { - should(elasticsearch._client.indices.getSettings).be.calledWithMatch({ + should( + elasticsearch.client._client.indices.getSettings, + ).be.calledWithMatch({ index: indice, }); - should(elasticsearch.updateSettings).be.calledTwice(); - should(elasticsearch.updateMapping).be.calledOnce(); - should(elasticsearch.updateSettings.getCall(1).args).be.eql([ + should(elasticsearch.client.updateSettings).be.calledTwice(); + should(elasticsearch.client.updateMapping).be.calledOnce(); + should(elasticsearch.client.updateSettings.getCall(1).args).be.eql([ index, collection, { index: { blocks: { write: false } } }, @@ -2637,7 +2732,7 @@ describe("Test: ElasticSearch service", () => { }); it("should calls updateSearchIndex if dynamic change from false to true", async () => { - elasticsearch.getMapping = sinon.stub().resolves({ + elasticsearch.client.getMapping = sinon.stub().resolves({ properties: { content: { dynamic: "false", @@ -2652,11 +2747,11 @@ describe("Test: ElasticSearch service", () => { }, }; - await elasticsearch.updateCollection(index, collection, { + await elasticsearch.client.updateCollection(index, collection, { mappings: newMappings, }); - should(elasticsearch.updateSearchIndex).be.calledOnce(); + should(elasticsearch.client.updateSearchIndex).be.calledOnce(); }); }); @@ -2664,7 +2759,7 @@ describe("Test: ElasticSearch service", () => { let newMapping, existingMapping, _checkMappings; beforeEach(() => { - _checkMappings = elasticsearch._checkMappings; + _checkMappings = elasticsearch.client._checkMappings; newMapping = { properties: { @@ -2685,20 +2780,22 @@ describe("Test: ElasticSearch service", () => { }, }; - elasticsearch.getMapping = sinon.stub().resolves(existingMapping); - elasticsearch._client.indices.putMapping.resolves({}); - elasticsearch._checkMappings = sinon.stub().resolves(); + elasticsearch.client.getMapping = sinon.stub().resolves(existingMapping); + elasticsearch.client._client.indices.putMapping.resolves({}); + elasticsearch.client._checkMappings = sinon.stub().resolves(); }); it("should have mapping capabilities", () => { - const promise = elasticsearch.updateMapping( + const promise = elasticsearch.client.updateMapping( index, collection, newMapping, ); return promise.then((result) => { - should(elasticsearch._client.indices.putMapping).be.calledWithMatch({ + should( + elasticsearch.client._client.indices.putMapping, + ).be.calledWithMatch({ index: alias, 
dynamic: "strict", _meta: { meta: "data" }, @@ -2724,7 +2821,7 @@ describe("Test: ElasticSearch service", () => { }); it("should reject with BadRequestError on wrong mapping", async () => { - elasticsearch._checkMappings = _checkMappings; + elasticsearch.client._checkMappings = _checkMappings; newMapping = { dinamic: "false", properties: { @@ -2734,7 +2831,7 @@ describe("Test: ElasticSearch service", () => { global.NODE_ENV = "development"; await should( - elasticsearch.updateMapping(index, collection, newMapping), + elasticsearch.client.updateMapping(index, collection, newMapping), ).be.rejectedWith({ message: 'Invalid mapping property "mappings.dinamic". Did you mean "dynamic"?', @@ -2743,7 +2840,7 @@ describe("Test: ElasticSearch service", () => { global.NODE_ENV = "production"; await should( - elasticsearch.updateMapping(index, collection, newMapping), + elasticsearch.client.updateMapping(index, collection, newMapping), ).be.rejectedWith({ message: 'Invalid mapping property "mappings.dinamic".', id: "services.storage.invalid_mapping", @@ -2760,14 +2857,16 @@ describe("Test: ElasticSearch service", () => { _meta: { other: "meta" }, }; - const promise = elasticsearch.updateMapping( + const promise = elasticsearch.client.updateMapping( index, collection, newMapping, ); return promise.then((result) => { - should(elasticsearch._client.indices.putMapping).be.calledWithMatch({ + should( + elasticsearch.client._client.indices.putMapping, + ).be.calledWithMatch({ index: alias, dynamic: "false", _meta: { other: "meta" }, @@ -2781,9 +2880,9 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client.cat.indices fails", () => { - elasticsearch._client.indices.putMapping.rejects(esClientError); + elasticsearch.client._client.indices.putMapping.rejects(esClientError); - const promise = elasticsearch.updateMapping( + const promise = elasticsearch.client.updateMapping( index, collection, newMapping, @@ -2792,7 +2891,7 @@ describe("Test: ElasticSearch service", () => { return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -2813,13 +2912,15 @@ describe("Test: ElasticSearch service", () => { }); it("should allow to change indice settings", async () => { - const result = await elasticsearch.updateSettings( + const result = await elasticsearch.client.updateSettings( index, collection, newSettings, ); - should(elasticsearch._client.indices.putSettings).be.calledWithMatch({ + should( + elasticsearch.client._client.indices.putSettings, + ).be.calledWithMatch({ index: alias, body: { index: { @@ -2838,20 +2939,20 @@ describe("Test: ElasticSearch service", () => { analyzer: { customer_analyzers: {} }, }; - await elasticsearch.updateSettings(index, collection, newSettings); + await elasticsearch.client.updateSettings(index, collection, newSettings); - should(elasticsearch._client.indices.close).be.calledWithMatch({ + should(elasticsearch.client._client.indices.close).be.calledWithMatch({ index: alias, }); - should(elasticsearch._client.indices.open).be.calledWithMatch({ + should(elasticsearch.client._client.indices.open).be.calledWithMatch({ index: alias, }); }); it("should return a rejected promise if client.cat.putSettings fails", () => { - elasticsearch._client.indices.putSettings.rejects(esClientError); + elasticsearch.client._client.indices.putSettings.rejects(esClientError); - const promise = 
elasticsearch.updateSettings( + const promise = elasticsearch.client.updateSettings( index, collection, newSettings, @@ -2860,7 +2961,7 @@ describe("Test: ElasticSearch service", () => { return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -2869,11 +2970,11 @@ describe("Test: ElasticSearch service", () => { describe("#updateSearchIndex", () => { it("should call updateByQuery", async () => { - elasticsearch._client.updateByQuery = sinon.stub().resolves(); + elasticsearch.client._client.updateByQuery = sinon.stub().resolves(); - await elasticsearch.updateSearchIndex(index, collection); + await elasticsearch.client.updateSearchIndex(index, collection); - should(elasticsearch._client.updateByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.updateByQuery).be.calledWithMatch({ conflicts: "proceed", index: alias, refresh: true, @@ -2893,9 +2994,9 @@ describe("Test: ElasticSearch service", () => { }, }; - elasticsearch.getMapping = sinon.stub().resolves(existingMapping); + elasticsearch.client.getMapping = sinon.stub().resolves(existingMapping); - elasticsearch._client.indices.getSettings.resolves({ + elasticsearch.client._client.indices.getSettings.resolves({ "&nyc-open-data.yellow-taxi": { settings: { analysis: { @@ -2908,22 +3009,25 @@ describe("Test: ElasticSearch service", () => { }, }, }); - sinon.stub(elasticsearch, "_getIndice").resolves(indice); - sinon.stub(elasticsearch, "_getWaitForActiveShards").resolves(1); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); + sinon.stub(elasticsearch.client, "_getWaitForActiveShards").resolves(1); }); afterEach(() => { - elasticsearch._getIndice.restore(); + elasticsearch.client._getIndice.restore(); }); it("should delete and then create the collection with the same mapping", async () => { - const result = await elasticsearch.truncateCollection(index, collection); + const result = await elasticsearch.client.truncateCollection( + index, + collection, + ); - should(elasticsearch.getMapping).be.calledWith(index, collection); - should(elasticsearch._client.indices.delete).be.calledWithMatch({ + should(elasticsearch.client.getMapping).be.calledWith(index, collection); + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ index: indice, }); - should(elasticsearch._client.indices.create).be.calledWithMatch({ + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ index: indice, aliases: { [alias]: {} }, mappings: { @@ -2942,51 +3046,63 @@ describe("Test: ElasticSearch service", () => { }, }, }); - should(elasticsearch._client.indices.getSettings).be.calledWithMatch({ + should( + elasticsearch.client._client.indices.getSettings, + ).be.calledWithMatch({ index: indice, }); should(result).be.null(); }); it("should return a rejected promise if client fails", () => { - elasticsearch._client.indices.delete.rejects(esClientError); + elasticsearch.client._client.indices.delete.rejects(esClientError); - const promise = elasticsearch.truncateCollection(index, collection); + const promise = elasticsearch.client.truncateCollection( + index, + collection, + ); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); }); it("should wait for all shards to be active when using an Elasticsearch 
cluster", async () => { - elasticsearch._getWaitForActiveShards = sinon.stub().resolves("all"); + elasticsearch.client._getWaitForActiveShards = sinon + .stub() + .resolves("all"); - await elasticsearch.truncateCollection(index, collection); - const esReq = elasticsearch._client.indices.create.firstCall.args[0]; + await elasticsearch.client.truncateCollection(index, collection); + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; should(esReq.wait_for_active_shards).eql("all"); }); it("should only wait for the primary shard to be active when using a single node", async () => { - elasticsearch._getWaitForActiveShards = sinon.stub().resolves("1"); + elasticsearch.client._getWaitForActiveShards = sinon.stub().resolves("1"); - await elasticsearch.truncateCollection(index, collection); - const esReq = elasticsearch._client.indices.create.firstCall.args[0]; + await elasticsearch.client.truncateCollection(index, collection); + const esReq = + elasticsearch.client._client.indices.create.firstCall.args[0]; should(esReq.wait_for_active_shards).eql("1"); }); }); describe("#import", () => { - let getExpectedEsRequest, bulkReturnError, documents, bulkReturn; + let getExpectedEsRequest; + let bulkReturnError; + let documents; + let bulkReturn; beforeEach(() => { getExpectedEsRequest = ({ userId = null, refresh, timeout } = {}) => ({ - body: [ + operations: [ { index: { _id: 1, _index: alias } }, { firstName: "foo", @@ -3027,39 +3143,35 @@ describe("Test: ElasticSearch service", () => { }); bulkReturn = { - body: { - items: [ - { index: { status: 201, _id: 1, toto: 42 } }, - { index: { status: 201, _id: 2, toto: 42 } }, - { update: { status: 200, _id: 3, toto: 42 } }, - { delete: { status: 200, _id: 4, toto: 42 } }, - ], - errors: false, - }, + items: [ + { index: { status: 201, _id: 1, toto: 42 } }, + { index: { status: 201, _id: 2, toto: 42 } }, + { update: { status: 200, _id: 3, toto: 42 } }, + { delete: { status: 200, _id: 4, toto: 42 } }, + ], + errors: false, }; bulkReturnError = { - body: { - items: [ - { index: { status: 201, _id: 1, toto: 42 } }, - { index: { status: 201, _id: 2, toto: 42 } }, - { - update: { - status: 404, - _id: 42, - error: { type: "not_found", reason: "not found", toto: 42 }, - }, + items: [ + { index: { status: 201, _id: 1, toto: 42 } }, + { index: { status: 201, _id: 2, toto: 42 } }, + { + update: { + status: 404, + _id: 42, + error: { type: "not_found", reason: "not found", toto: 42 }, }, - { - delete: { - status: 404, - _id: 21, - error: { type: "not_found", reason: "not found", toto: 42 }, - }, + }, + { + delete: { + status: 404, + _id: 21, + error: { type: "not_found", reason: "not found", toto: 42 }, }, - ], - errors: true, - }, + }, + ], + errors: true, }; documents = [ @@ -3075,7 +3187,7 @@ describe("Test: ElasticSearch service", () => { { delete: { _id: 4 } }, ]; - elasticsearch._client.bulk.resolves(bulkReturn); + elasticsearch.client._client.bulk.resolves(bulkReturn); }); it("should support bulk data import", () => { @@ -3092,10 +3204,10 @@ describe("Test: ElasticSearch service", () => { { delete: { _id: 4 } }, ]; - const promise = elasticsearch.import(index, collection, documents); + const promise = elasticsearch.client.import(index, collection, documents); return promise.then((result) => { - should(elasticsearch._client.bulk).be.calledWithMatch( + should(elasticsearch.client._client.bulk).be.calledWithMatch( getExpectedEsRequest(), ); @@ -3112,14 +3224,19 @@ describe("Test: ElasticSearch service", () => { }); it("should inject 
additional options to esRequest", () => { - const promise = elasticsearch.import(index, collection, documents, { - refresh: "wait_for", - timeout: "10m", - userId: "aschen", - }); + const promise = elasticsearch.client.import( + index, + collection, + documents, + { + refresh: "wait_for", + timeout: "10m", + userId: "aschen", + }, + ); return promise.then(() => { - should(elasticsearch._client.bulk).be.calledWithMatch( + should(elasticsearch.client._client.bulk).be.calledWithMatch( getExpectedEsRequest({ refresh: "wait_for", timeout: "10m", @@ -3130,9 +3247,9 @@ describe("Test: ElasticSearch service", () => { }); it('should populate "errors" array for bulk data import with some errors', () => { - elasticsearch._client.bulk.resolves(bulkReturnError); + elasticsearch.client._client.bulk.resolves(bulkReturnError); - const promise = elasticsearch.import(index, collection, documents); + const promise = elasticsearch.client.import(index, collection, documents); return promise.then((result) => { should(result).match({ @@ -3161,14 +3278,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", () => { - elasticsearch._client.bulk.rejects(esClientError); + elasticsearch.client._client.bulk.rejects(esClientError); - const promise = elasticsearch.import(index, collection, documents); + const promise = elasticsearch.client.import(index, collection, documents); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -3177,7 +3294,7 @@ describe("Test: ElasticSearch service", () => { describe("#listCollections", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { alias: "@&nepali.mehry" }, { alias: "@&nepali.liia" }, { alias: "@&nyc-open-data.taxi" }, @@ -3186,7 +3303,7 @@ describe("Test: ElasticSearch service", () => { }); it("should allow listing all available collections", () => { - const promise = elasticsearch.listCollections("nepali"); + const promise = elasticsearch.client.listCollections("nepali"); return promise.then((result) => { should(result).match(["mehry", "liia"]); @@ -3194,13 +3311,13 @@ describe("Test: ElasticSearch service", () => { }); it("should not list unauthorized collections", () => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { alias: "@%nepali.mehry" }, { alias: "@%nepali.liia" }, { alias: "@%nyc-open-data.taxi" }, ]); - const promise = elasticsearch.listCollections("nepali"); + const promise = elasticsearch.client.listCollections("nepali"); return promise.then((result) => { should(result).match([]); @@ -3208,11 +3325,11 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", async () => { - elasticsearch._client.cat.aliases.rejects(esClientError); + elasticsearch.client._client.cat.aliases.rejects(esClientError); - await should(elasticsearch.listCollections(index)).be.rejected(); + await should(elasticsearch.client.listCollections(index)).be.rejected(); - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -3220,7 +3337,7 @@ describe("Test: ElasticSearch service", () => { describe("#listIndexes", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves([ + 
elasticsearch.client._client.cat.aliases.resolves([ { alias: "@&nepali.mehry" }, { alias: "@&nepali.liia" }, { alias: "@&nyc-open-data.taxi" }, @@ -3228,10 +3345,10 @@ describe("Test: ElasticSearch service", () => { }); it("should allow listing all available indexes", () => { - const promise = elasticsearch.listIndexes(); + const promise = elasticsearch.client.listIndexes(); return promise.then((result) => { - should(elasticsearch._client.cat.aliases).be.calledWithMatch({ + should(elasticsearch.client._client.cat.aliases).be.calledWithMatch({ format: "json", }); @@ -3240,14 +3357,14 @@ describe("Test: ElasticSearch service", () => { }); it("should not list unauthorized indexes", () => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { alias: "@%nepali.mehry" }, { alias: "@%nepali.liia" }, { alias: "@%nyc-open-data.taxi" }, { alias: "@&vietnam.lfiduras" }, ]); - const promise = elasticsearch.listIndexes(); + const promise = elasticsearch.client.listIndexes(); return promise.then((result) => { should(result).match(["vietnam"]); @@ -3255,11 +3372,11 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", async () => { - elasticsearch._client.cat.aliases.rejects(esClientError); + elasticsearch.client._client.cat.aliases.rejects(esClientError); - await should(elasticsearch.listIndexes()).be.rejected(); + await should(elasticsearch.client.listIndexes()).be.rejected(); - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -3267,7 +3384,7 @@ describe("Test: ElasticSearch service", () => { describe("#listAliases", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { index: "&nepalu.mehry", alias: "@&nepali.mehry" }, { index: "&nepali.lia", alias: "@&nepali.liia" }, { index: "&nyc-open-data.taxi", alias: "@&nyc-open-data.taxi" }, @@ -3275,9 +3392,9 @@ describe("Test: ElasticSearch service", () => { }); it("should allow listing all available aliases", async () => { - const result = await elasticsearch.listAliases(); + const result = await elasticsearch.client.listAliases(); - should(elasticsearch._client.cat.aliases).be.calledWithMatch({ + should(elasticsearch.client._client.cat.aliases).be.calledWithMatch({ format: "json", }); @@ -3304,14 +3421,14 @@ describe("Test: ElasticSearch service", () => { }); it("should not list unauthorized aliases", async () => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { index: "%nepalu.mehry", alias: "@%nepali.mehry" }, { index: "%nepali.lia", alias: "@%nepali.liia" }, { index: "%nyc-open-data.taxi", alias: "@%nyc-open-data.taxi" }, { index: "&vietnam.lfiduras", alias: "@&vietnam.lfiduras" }, ]); - const result = await elasticsearch.listAliases(); + const result = await elasticsearch.client.listAliases(); should(result).match([ { @@ -3324,11 +3441,11 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", async () => { - elasticsearch._client.cat.aliases.rejects(esClientError); + elasticsearch.client._client.cat.aliases.rejects(esClientError); - await should(elasticsearch.listAliases()).be.rejected(); + await should(elasticsearch.client.listAliases()).be.rejected(); - should(elasticsearch._esWrapper.formatESError).be.calledWith( + 
should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -3336,7 +3453,7 @@ describe("Test: ElasticSearch service", () => { describe("#deleteIndexes", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { alias: "@&nepali.mehry", index: "&nepali.mehry" }, { alias: "@&nepali.liia", index: "&nepali.liia" }, { alias: "@&do-not.delete", index: "&do-not.delete" }, @@ -3345,10 +3462,13 @@ describe("Test: ElasticSearch service", () => { }); it("should allow to deletes multiple indexes", () => { - const promise = elasticsearch.deleteIndexes(["nepali", "nyc-open-data"]); + const promise = elasticsearch.client.deleteIndexes([ + "nepali", + "nyc-open-data", + ]); return promise.then((result) => { - should(elasticsearch._client.indices.delete).be.calledWithMatch({ + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ index: ["&nepali.mehry", "&nepali.liia", "&nyc-open-data.taxi"], }); @@ -3357,17 +3477,20 @@ describe("Test: ElasticSearch service", () => { }); it("should not delete unauthorized indexes", () => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { alias: "@&nepali.mehry", index: "&nepali.mehry" }, { alias: "@&nepali.liia", index: "&nepali.liia" }, { alias: "@&do-not.delete", index: "&do-not.delete" }, { alias: "@%nyc-open-data.taxi", index: "%nyc-open-data.taxi" }, ]); - const promise = elasticsearch.deleteIndexes(["nepali", "nyc-open-data"]); + const promise = elasticsearch.client.deleteIndexes([ + "nepali", + "nyc-open-data", + ]); return promise.then((result) => { - should(elasticsearch._client.indices.delete).be.calledWithMatch({ + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ index: ["&nepali.mehry", "&nepali.liia"], }); @@ -3376,10 +3499,10 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", async () => { - elasticsearch._client.cat.aliases.rejects(esClientError); + elasticsearch.client._client.cat.aliases.rejects(esClientError); - await should(elasticsearch.listIndexes()).be.rejected(); - should(elasticsearch._esWrapper.formatESError).be.calledWith( + await should(elasticsearch.client.listIndexes()).be.rejected(); + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -3387,12 +3510,12 @@ describe("Test: ElasticSearch service", () => { describe("#deleteIndex", () => { it("should call deleteIndexes", () => { - elasticsearch.deleteIndexes = sinon.stub().resolves(); + elasticsearch.client.deleteIndexes = sinon.stub().resolves(); - const promise = elasticsearch.deleteIndex("nepali"); + const promise = elasticsearch.client.deleteIndex("nepali"); return promise.then((result) => { - should(elasticsearch.deleteIndexes).be.calledWith(["nepali"]); + should(elasticsearch.client.deleteIndexes).be.calledWith(["nepali"]); should(result).be.null(); }); @@ -3401,55 +3524,64 @@ describe("Test: ElasticSearch service", () => { describe("#deleteCollection", () => { beforeEach(() => { - sinon.stub(elasticsearch, "_createHiddenCollection").resolves(); - sinon.stub(elasticsearch, "_getIndice").resolves(indice); - sinon.stub(elasticsearch, "_checkIfAliasExists").resolves(undefined); + sinon.stub(elasticsearch.client, "_createHiddenCollection").resolves(); + sinon.stub(elasticsearch.client, "_getIndice").resolves(indice); + sinon + .stub(elasticsearch.client, "_checkIfAliasExists") + 
.resolves(undefined); }); afterEach(() => { - elasticsearch._getIndice.restore(); + elasticsearch.client._getIndice.restore(); }); it("should allow to delete a collection", async () => { - const result = await elasticsearch.deleteCollection(index, collection); + const result = await elasticsearch.client.deleteCollection( + index, + collection, + ); - should(elasticsearch._client.indices.delete).be.calledWithMatch({ + should(elasticsearch.client._client.indices.delete).be.calledWithMatch({ index: indice, }); should(result).be.null(); - should(elasticsearch._createHiddenCollection).be.called(); + should(elasticsearch.client._createHiddenCollection).be.called(); }); it("should create the hidden collection if the index is empty", async () => { - await elasticsearch.deleteCollection(index, collection); + await elasticsearch.client.deleteCollection(index, collection); - should(elasticsearch._createHiddenCollection).be.called(); + should(elasticsearch.client._createHiddenCollection).be.called(); }); it("should delete the remaining alias if it still exists", async () => { - elasticsearch._checkIfAliasExists.resolves(["myalias"]); - elasticsearch._client.indices.deleteAlias = sinon.stub().resolves(); + elasticsearch.client._checkIfAliasExists.resolves(["myalias"]); + elasticsearch.client._client.indices.deleteAlias = sinon + .stub() + .resolves(); - await elasticsearch.deleteCollection(index, collection); + await elasticsearch.client.deleteCollection(index, collection); - should(elasticsearch._client.indices.deleteAlias).be.called(); + should(elasticsearch.client._client.indices.deleteAlias).be.called(); }); }); describe("#refreshCollection", () => { it("should send a valid request to es client", () => { - elasticsearch._client.indices.refresh.resolves({ + elasticsearch.client._client.indices.refresh.resolves({ _shards: "shards", }); - const promise = elasticsearch.refreshCollection(index, collection); + const promise = elasticsearch.client.refreshCollection(index, collection); return promise.then((result) => { - should(elasticsearch._client.indices.refresh).be.calledWithMatch({ - index: alias, - }); + should(elasticsearch.client._client.indices.refresh).be.calledWithMatch( + { + index: alias, + }, + ); should(result).match({ _shards: "shards", @@ -3458,24 +3590,26 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", async () => { - elasticsearch._client.indices.refresh.rejects(esClientError); + elasticsearch.client._client.indices.refresh.rejects(esClientError); await should( - elasticsearch.refreshCollection(index, collection), + elasticsearch.client.refreshCollection(index, collection), ).rejected(); - should(elasticsearch._esWrapper.formatESError).calledWith(esClientError); + should(elasticsearch.client._esWrapper.formatESError).calledWith( + esClientError, + ); }); }); describe("#exists", () => { it("should have document exists capability", () => { - elasticsearch._client.exists.resolves(true); + elasticsearch.client._client.exists.resolves(true); - const promise = elasticsearch.exists(index, collection, "liia"); + const promise = elasticsearch.client.exists(index, collection, "liia"); return promise.then((result) => { - should(elasticsearch._client.exists).be.calledWithMatch({ + should(elasticsearch.client._client.exists).be.calledWithMatch({ index: alias, id: "liia", }); @@ -3485,14 +3619,14 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", () => { - 
elasticsearch._client.exists.rejects(esClientError); + elasticsearch.client._client.exists.rejects(esClientError); - const promise = elasticsearch.exists(index, collection, "liia"); + const promise = elasticsearch.client.exists(index, collection, "liia"); return should(promise) .be.rejected() .then(() => { - should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -3501,28 +3635,28 @@ describe("Test: ElasticSearch service", () => { describe("#hasIndex", () => { it("should call list indexes and return true if index exists", () => { - elasticsearch.listIndexes = sinon + elasticsearch.client.listIndexes = sinon .stub() .resolves(["nepali", "nyc-open-data"]); - const promise = elasticsearch.hasIndex("nepali"); + const promise = elasticsearch.client.hasIndex("nepali"); return promise.then((result) => { - should(elasticsearch.listIndexes).be.called(); + should(elasticsearch.client.listIndexes).be.called(); should(result).be.eql(true); }); }); it("should call list indexes and return false if index does not exists", () => { - elasticsearch.listIndexes = sinon + elasticsearch.client.listIndexes = sinon .stub() .resolves(["nepali", "nyc-open-data"]); - const promise = elasticsearch.hasIndex("vietnam"); + const promise = elasticsearch.client.hasIndex("vietnam"); return promise.then((result) => { - should(elasticsearch.listIndexes).be.called(); + should(elasticsearch.client.listIndexes).be.called(); should(result).be.eql(false); }); @@ -3531,24 +3665,28 @@ describe("Test: ElasticSearch service", () => { describe("#hasCollection", () => { it("should call list collections and return true if collection exists", () => { - elasticsearch.listCollections = sinon.stub().resolves(["liia", "mehry"]); + elasticsearch.client.listCollections = sinon + .stub() + .resolves(["liia", "mehry"]); - const promise = elasticsearch.hasCollection("nepali", "liia"); + const promise = elasticsearch.client.hasCollection("nepali", "liia"); return promise.then((result) => { - should(elasticsearch.listCollections).be.called(); + should(elasticsearch.client.listCollections).be.called(); should(result).be.eql(true); }); }); it("should call list collections and return false if collection does not exists", () => { - elasticsearch.listCollections = sinon.stub().resolves(["liia", "mehry"]); + elasticsearch.client.listCollections = sinon + .stub() + .resolves(["liia", "mehry"]); - const promise = elasticsearch.hasCollection("nepali", "lfiduras"); + const promise = elasticsearch.client.hasCollection("nepali", "lfiduras"); return promise.then((result) => { - should(elasticsearch.listCollections).be.called(); + should(elasticsearch.client.listCollections).be.called(); should(result).be.eql(false); }); @@ -3580,22 +3718,22 @@ describe("Test: ElasticSearch service", () => { mExecuteResult = { items: [], errors: [] }; - elasticsearch._mExecute = sinon.stub().resolves(mExecuteResult); + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); }); it("should do a mGet request if we need to get some documents", () => { - elasticsearch._client.mget.resolves({ + elasticsearch.client._client.mget.resolves({ docs: [], }); - const promise = elasticsearch.mCreate( + const promise = elasticsearch.client.mCreate( index, collection, documentsWithIds, ); return promise.then((result) => { - should(elasticsearch._client.mget).be.calledWithMatch({ + should(elasticsearch.client._client.mget).be.calledWithMatch({ index: alias, docs: [{ _id: 
"liia", _source: false }], }); @@ -3615,7 +3753,7 @@ describe("Test: ElasticSearch service", () => { { _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -3626,18 +3764,18 @@ describe("Test: ElasticSearch service", () => { }); it("should reject already existing documents", () => { - elasticsearch._client.mget.resolves({ + elasticsearch.client._client.mget.resolves({ docs: [{ _id: "liia", found: true }], }); - const promise = elasticsearch.mCreate( + const promise = elasticsearch.client.mCreate( index, collection, documentsWithIds, ); return promise.then((result) => { - should(elasticsearch._client.mget).be.calledWithMatch({ + should(elasticsearch.client._client.mget).be.calledWithMatch({ index: alias, docs: [{ _id: "liia", _source: false }], }); @@ -3663,7 +3801,7 @@ describe("Test: ElasticSearch service", () => { }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, rejected, @@ -3674,14 +3812,14 @@ describe("Test: ElasticSearch service", () => { }); it("should not do a mGet request if we didn't need to get some documents", () => { - const promise = elasticsearch.mCreate( + const promise = elasticsearch.client.mCreate( index, collection, documentsWithoutIds, ); return promise.then((result) => { - should(elasticsearch._client.mget).not.be.called(); + should(elasticsearch.client._client.mget).not.be.called(); const esRequest = { index: alias, @@ -3698,7 +3836,7 @@ describe("Test: ElasticSearch service", () => { { _source: { city: "Kathmandu", ...kuzzleMeta } }, { _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -3710,7 +3848,7 @@ describe("Test: ElasticSearch service", () => { it("should allow additional options", () => { kuzzleMeta._kuzzle_info.author = "aschen"; - const promise = elasticsearch.mCreate( + const promise = elasticsearch.client.mCreate( index, collection, documentsWithoutIds, @@ -3718,7 +3856,7 @@ describe("Test: ElasticSearch service", () => { ); return promise.then((result) => { - should(elasticsearch._client.mget).not.be.called(); + should(elasticsearch.client._client.mget).not.be.called(); const esRequest = { index: alias, @@ -3735,7 +3873,7 @@ describe("Test: ElasticSearch service", () => { { _source: { city: "Kathmandu", ...kuzzleMeta } }, { _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -3766,11 +3904,11 @@ describe("Test: ElasticSearch service", () => { mExecuteResult = { items: [], errors: [] }; - elasticsearch._mExecute = sinon.stub().resolves(mExecuteResult); + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); }); it("should call _mExecute with formated documents and source flag", async () => { - const promise = elasticsearch.mCreateOrReplace( + const promise = elasticsearch.client.mCreateOrReplace( index, collection, documents, @@ -3794,7 +3932,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - 
should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -3805,7 +3943,7 @@ describe("Test: ElasticSearch service", () => { }); it("should call _mExecute with formated documents", () => { - const promise = elasticsearch.mCreateOrReplace( + const promise = elasticsearch.client.mCreateOrReplace( index, collection, documents, @@ -3827,7 +3965,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -3840,7 +3978,7 @@ describe("Test: ElasticSearch service", () => { it("should allow additional options", () => { kuzzleMeta._kuzzle_info.author = "aschen"; - const promise = elasticsearch.mCreateOrReplace( + const promise = elasticsearch.client.mCreateOrReplace( index, collection, documents, @@ -3863,7 +4001,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -3874,7 +4012,7 @@ describe("Test: ElasticSearch service", () => { }); it("should not inject kuzzle meta when specified", () => { - const promise = elasticsearch.mCreateOrReplace( + const promise = elasticsearch.client.mCreateOrReplace( index, collection, documents, @@ -3897,7 +4035,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu" } }, { _id: "liia", _source: { city: "Ho Chi Minh City" } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -3908,11 +4046,16 @@ describe("Test: ElasticSearch service", () => { }); it('should forward the "limits" option to mExecute', async () => { - await elasticsearch.mCreateOrReplace(index, collection, documents, { - limits: false, - }); + await elasticsearch.client.mCreateOrReplace( + index, + collection, + documents, + { + limits: false, + }, + ); - const options = elasticsearch._mExecute.getCall(0).args[3]; + const options = elasticsearch.client._mExecute.getCall(0).args[3]; should(options.limits).be.false(); }); }); @@ -3949,11 +4092,15 @@ describe("Test: ElasticSearch service", () => { errors: [], }; - elasticsearch._mExecute = sinon.stub().resolves(mExecuteResult); + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); }); it("should call _mExecute with formated documents", () => { - const promise = elasticsearch.mUpdate(index, collection, documents); + const promise = elasticsearch.client.mUpdate( + index, + collection, + documents, + ); return promise.then((result) => { const esRequest = { @@ -3985,7 +4132,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4010,12 +4157,17 @@ describe("Test: ElasticSearch service", () => { it("should allow additional options", () => { kuzzleMeta._kuzzle_info.updater = "aschen"; - const promise = 
elasticsearch.mUpdate(index, collection, documents, { - refresh: "wait_for", - retryOnConflict: 2, - timeout: "10m", - userId: "aschen", - }); + const promise = elasticsearch.client.mUpdate( + index, + collection, + documents, + { + refresh: "wait_for", + retryOnConflict: 2, + timeout: "10m", + userId: "aschen", + }, + ); return promise.then(() => { const esRequest = { @@ -4033,7 +4185,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4047,7 +4199,11 @@ describe("Test: ElasticSearch service", () => { { body: { city: "Ho Chi Minh City" } }, ]; - const promise = elasticsearch.mUpdate(index, collection, documents); + const promise = elasticsearch.client.mUpdate( + index, + collection, + documents, + ); return promise.then(() => { const esRequest = { @@ -4080,7 +4236,7 @@ describe("Test: ElasticSearch service", () => { }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, rejected, @@ -4186,13 +4342,17 @@ describe("Test: ElasticSearch service", () => { errors: [], }; - elasticsearch._mExecute = sinon.stub().resolves(mExecuteResult); + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); }); it("should call _mExecute with formated documents", async () => { - const result = await elasticsearch.mUpsert(index, collection, documents); + const result = await elasticsearch.client.mUpsert( + index, + collection, + documents, + ); - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4220,9 +4380,13 @@ describe("Test: ElasticSearch service", () => { esRequest.operations[3].upsert.country = "Vietnam"; toImport[1]._source.default.country = "Vietnam"; - const result = await elasticsearch.mUpsert(index, collection, documents); + const result = await elasticsearch.client.mUpsert( + index, + collection, + documents, + ); - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4253,14 +4417,14 @@ describe("Test: ElasticSearch service", () => { esRequest.refresh = "wait_for"; esRequest.timeout = "10m"; - await elasticsearch.mUpsert(index, collection, documents, { + await elasticsearch.client.mUpsert(index, collection, documents, { refresh: "wait_for", retryOnConflict: 42, timeout: "10m", userId: "aschen", }); - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4279,9 +4443,9 @@ describe("Test: ElasticSearch service", () => { }, ]; - await elasticsearch.mUpsert(index, collection, documents); + await elasticsearch.client.mUpsert(index, collection, documents); - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, rejected, @@ -4290,11 +4454,15 @@ describe("Test: ElasticSearch service", () => { it('should return the right "_created" result on a document creation', async () => { mExecuteResult.items[1].result = "created"; - elasticsearch._mExecute = sinon.stub().resolves(mExecuteResult); + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); - const result = await 
elasticsearch.mUpsert(index, collection, documents); + const result = await elasticsearch.client.mUpsert( + index, + collection, + documents, + ); - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4338,9 +4506,9 @@ describe("Test: ElasticSearch service", () => { mExecuteResult = { items: [], errors: [] }; - elasticsearch._mExecute = sinon.stub().resolves(mExecuteResult); + elasticsearch.client._mExecute = sinon.stub().resolves(mExecuteResult); - elasticsearch._client.mget.resolves({ + elasticsearch.client._client.mget.resolves({ docs: [ { _id: "mehry", found: true }, { _id: "liia", found: true }, @@ -4349,10 +4517,14 @@ describe("Test: ElasticSearch service", () => { }); it("should get documents and then format them for _mExecute", () => { - const promise = elasticsearch.mReplace(index, collection, documents); + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + ); return promise.then((result) => { - should(elasticsearch._client.mget).be.calledWithMatch({ + should(elasticsearch.client._client.mget).be.calledWithMatch({ index: alias, docs: [ { _id: "mehry", _source: false }, @@ -4374,7 +4546,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4385,17 +4557,21 @@ describe("Test: ElasticSearch service", () => { }); it("should add not found documents to rejected", () => { - elasticsearch._client.mget.resolves({ + elasticsearch.client._client.mget.resolves({ docs: [ { _id: "mehry", found: true }, { _id: "liia", found: false }, ], }); - const promise = elasticsearch.mReplace(index, collection, documents); + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + ); return promise.then((result) => { - should(elasticsearch._client.mget).be.calledWithMatch({ + should(elasticsearch.client._client.mget).be.calledWithMatch({ index: alias, docs: [ { _id: "mehry", _source: false }, @@ -4424,7 +4600,7 @@ describe("Test: ElasticSearch service", () => { status: 404, }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, rejected, @@ -4439,14 +4615,18 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", body: { city: "Kathmandu" } }, { body: { city: "Ho Chi Minh City" } }, ]; - elasticsearch._client.mget.resolves({ + elasticsearch.client._client.mget.resolves({ docs: [{ _id: "mehry", found: true }], }); - const promise = elasticsearch.mReplace(index, collection, documents); + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + ); return promise.then((result) => { - should(elasticsearch._client.mget).be.calledWithMatch({ + should(elasticsearch.client._client.mget).be.calledWithMatch({ index: alias, docs: [{ _id: "mehry", _source: false }], }); @@ -4469,7 +4649,7 @@ describe("Test: ElasticSearch service", () => { status: 400, }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, rejected, @@ -4482,11 +4662,16 @@ describe("Test: ElasticSearch service", () => { it("should allow additional options", () => { kuzzleMeta._kuzzle_info.author = "aschen"; - const 
promise = elasticsearch.mReplace(index, collection, documents, { - refresh: "wait_for", - timeout: "10m", - userId: "aschen", - }); + const promise = elasticsearch.client.mReplace( + index, + collection, + documents, + { + refresh: "wait_for", + timeout: "10m", + userId: "aschen", + }, + ); return promise.then((result) => { const esRequest = { @@ -4503,7 +4688,7 @@ describe("Test: ElasticSearch service", () => { { _id: "mehry", _source: { city: "Kathmandu", ...kuzzleMeta } }, { _id: "liia", _source: { city: "Ho Chi Minh City", ...kuzzleMeta } }, ]; - should(elasticsearch._mExecute).be.calledWithMatch( + should(elasticsearch.client._mExecute).be.calledWithMatch( esRequest, toImport, [], @@ -4520,22 +4705,22 @@ describe("Test: ElasticSearch service", () => { beforeEach(() => { documentIds = ["mehry", "liia"]; - elasticsearch._getAllDocumentsFromQuery = sinon.stub().resolves([ + elasticsearch.client._getAllDocumentsFromQuery = sinon.stub().resolves([ { _id: "mehry", _source: { city: "Kathmandu" } }, { _id: "liia", _source: { city: "Ho Chi Minh City" } }, ]); - elasticsearch._client.deleteByQuery.resolves({ + elasticsearch.client._client.deleteByQuery.resolves({ total: 2, deleted: 2, failures: [], }); - elasticsearch._client.indices.refresh.resolves({ + elasticsearch.client._client.indices.refresh.resolves({ _shards: 1, }); - elasticsearch.mGet = sinon.stub().resolves({ + elasticsearch.client.mGet = sinon.stub().resolves({ items: [ { _id: "mehry", _source: { city: "Kathmandu" } }, { _id: "liia", _source: { city: "Ho Chi Minh City" } }, @@ -4544,22 +4729,22 @@ describe("Test: ElasticSearch service", () => { }); it("should allow to delete multiple documents with deleteByQuery", async () => { - const result = await elasticsearch.mDelete( + const result = await elasticsearch.client.mDelete( index, collection, documentIds, ); - should(elasticsearch._client.indices.refresh).be.calledWith({ + should(elasticsearch.client._client.indices.refresh).be.calledWith({ index: `@&${index}.${collection}`, }); - should(elasticsearch.mGet).be.calledWithMatch(index, collection, [ + should(elasticsearch.client.mGet).be.calledWithMatch(index, collection, [ "mehry", "liia", ]); - should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ index: alias, query: { ids: { values: ["mehry", "liia"] } }, scroll: "5s", @@ -4575,19 +4760,24 @@ describe("Test: ElasticSearch service", () => { }); it("should add non existing documents to rejected", () => { - elasticsearch.mGet = sinon.stub().resolves({ + elasticsearch.client.mGet = sinon.stub().resolves({ items: [{ _id: "mehry", _source: { city: "Kathmandu" } }], }); - const promise = elasticsearch.mDelete(index, collection, documentIds); + const promise = elasticsearch.client.mDelete( + index, + collection, + documentIds, + ); return promise.then((result) => { - should(elasticsearch.mGet).be.calledWithMatch(index, collection, [ - "mehry", - "liia", - ]); + should(elasticsearch.client.mGet).be.calledWithMatch( + index, + collection, + ["mehry", "liia"], + ); - should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ index: alias, query: { ids: { values: ["mehry"] } }, scroll: "5s", @@ -4601,18 +4791,23 @@ describe("Test: ElasticSearch service", () => { }); it("should add document with ID non string to rejected", () => { - elasticsearch.mGet = sinon.stub().resolves({ + elasticsearch.client.mGet = sinon.stub().resolves({ 
items: [{ _id: "mehry", _source: { city: "Kathmandu" } }], }); - const promise = elasticsearch.mDelete(index, collection, ["mehry", 42]); + const promise = elasticsearch.client.mDelete(index, collection, [ + "mehry", + 42, + ]); return promise.then((result) => { - should(elasticsearch.mGet).be.calledWithMatch(index, collection, [ - "mehry", - ]); + should(elasticsearch.client.mGet).be.calledWithMatch( + index, + collection, + ["mehry"], + ); - should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ index: alias, query: { ids: { values: ["mehry"] } }, scroll: "5s", @@ -4628,12 +4823,17 @@ describe("Test: ElasticSearch service", () => { }); it("should allow additional options", () => { - const promise = elasticsearch.mDelete(index, collection, documentIds, { - refresh: "wait_for", - }); + const promise = elasticsearch.client.mDelete( + index, + collection, + documentIds, + { + refresh: "wait_for", + }, + ); return promise.then(() => { - should(elasticsearch._client.deleteByQuery).be.calledWithMatch({ + should(elasticsearch.client._client.deleteByQuery).be.calledWithMatch({ index: alias, query: { ids: { values: ["mehry", "liia"] } }, scroll: "5s", @@ -4670,7 +4870,7 @@ describe("Test: ElasticSearch service", () => { }, ]; - elasticsearch._client.bulk.resolves({ + elasticsearch.client._client.bulk.resolves({ items: [ { index: { @@ -4695,14 +4895,14 @@ describe("Test: ElasticSearch service", () => { }); it("should call client.bulk and separate success from errors", () => { - const promise = elasticsearch._mExecute( + const promise = elasticsearch.client._mExecute( esRequest, documents, partialErrors, ); return promise.then((result) => { - should(elasticsearch._client.bulk).be.calledWithMatch(esRequest); + should(elasticsearch.client._client.bulk).be.calledWithMatch(esRequest); const expectedResult = [ { @@ -4734,10 +4934,14 @@ describe("Test: ElasticSearch service", () => { }); it("should not call bulk if there is no documents", () => { - const promise = elasticsearch._mExecute(esRequest, [], partialErrors); + const promise = elasticsearch.client._mExecute( + esRequest, + [], + partialErrors, + ); return promise.then((result) => { - should(elasticsearch._client.bulk).not.be.called(); + should(elasticsearch.client._client.bulk).not.be.called(); const expectedErrors = [ { @@ -4755,7 +4959,7 @@ describe("Test: ElasticSearch service", () => { it("should reject if limit document reached", () => { kuzzle.config.limits.documentsWriteCount = 1; - const promise = elasticsearch._mExecute( + const promise = elasticsearch.client._mExecute( esRequest, documents, partialErrors, @@ -4769,7 +4973,7 @@ describe("Test: ElasticSearch service", () => { it('should not reject if the documents limit is reached but the "limits" option is false', () => { kuzzle.config.limits.documentsWriteCount = 1; - const promise = elasticsearch._mExecute( + const promise = elasticsearch.client._mExecute( esRequest, documents, partialErrors, @@ -4780,9 +4984,9 @@ describe("Test: ElasticSearch service", () => { }); it("should return a rejected promise if client fails", () => { - elasticsearch._client.bulk.rejects(esClientError); + elasticsearch.client._client.bulk.rejects(esClientError); - const promise = elasticsearch._mExecute( + const promise = elasticsearch.client._mExecute( esRequest, documents, partialErrors, @@ -4791,7 +4995,7 @@ describe("Test: ElasticSearch service", () => { return should(promise) .be.rejected() .then(() => { - 
should(elasticsearch._esWrapper.formatESError).be.calledWith( + should(elasticsearch.client._esWrapper.formatESError).be.calledWith( esClientError, ); }); @@ -4813,10 +5017,8 @@ describe("Test: ElasticSearch service", () => { }, }; - const { rejected, extractedDocuments } = elasticsearch._extractMDocuments( - documents, - kuzzleMeta, - ); + const { rejected, extractedDocuments } = + elasticsearch.client._extractMDocuments(documents, kuzzleMeta); should(rejected).match([ { @@ -4836,19 +5038,21 @@ describe("Test: ElasticSearch service", () => { describe("#isIndexNameValid", () => { it("should allow a valid index name", () => { - should(elasticsearch.isIndexNameValid("foobar")).be.true(); + should(elasticsearch.client.isIndexNameValid("foobar")).be.true(); }); it("should not allow empty index names", () => { - should(elasticsearch.isIndexNameValid("")).be.false(); + should(elasticsearch.client.isIndexNameValid("")).be.false(); }); it("should not allow uppercase chars", () => { - should(elasticsearch.isIndexNameValid("bAr")).be.false(); + should(elasticsearch.client.isIndexNameValid("bAr")).be.false(); }); it("should not allow index names that are too long", () => { - return should(elasticsearch.isIndexNameValid("Ӣ".repeat(64))).be.false(); + return should( + elasticsearch.client.isIndexNameValid("Ӣ".repeat(64)), + ).be.false(); }); it("should not allow forbidden chars in the name", () => { @@ -4857,27 +5061,27 @@ describe("Test: ElasticSearch service", () => { for (let i = 0; i < forbidden.length; i++) { const name = `foo${forbidden[i]}bar`; - should(elasticsearch.isIndexNameValid(name)).be.false(); + should(elasticsearch.client.isIndexNameValid(name)).be.false(); } }); }); describe("#isCollectionNameValid", () => { it("should allow a valid collection name", () => { - should(elasticsearch.isCollectionNameValid("foobar")).be.true(); + should(elasticsearch.client.isCollectionNameValid("foobar")).be.true(); }); it("should not allow empty collection names", () => { - should(elasticsearch.isCollectionNameValid("")).be.false(); + should(elasticsearch.client.isCollectionNameValid("")).be.false(); }); it("should not allow uppercase chars", () => { - should(elasticsearch.isCollectionNameValid("bAr")).be.false(); + should(elasticsearch.client.isCollectionNameValid("bAr")).be.false(); }); it("should not allow collection names that are too long", () => { return should( - elasticsearch.isCollectionNameValid("Ӣ".repeat(64)), + elasticsearch.client.isCollectionNameValid("Ӣ".repeat(64)), ).be.false(); }); @@ -4887,14 +5091,14 @@ describe("Test: ElasticSearch service", () => { for (let i = 0; i < forbidden.length; i++) { const name = `foo${forbidden[i]}bar`; - should(elasticsearch.isCollectionNameValid(name)).be.false(); + should(elasticsearch.client.isCollectionNameValid(name)).be.false(); } }); }); describe("#getSchema", () => { beforeEach(() => { - elasticsearch._client.cat.aliases.resolves([ + elasticsearch.client._client.cat.aliases.resolves([ { alias: "@&nepali.mehry" }, { alias: "@&nepali._kuzzle_keep" }, { alias: "@&istanbul._kuzzle_keep" }, @@ -4902,7 +5106,7 @@ describe("Test: ElasticSearch service", () => { }); it("should returns the DB schema without hidden collections", async () => { - const schema = await elasticsearch.getSchema(); + const schema = await elasticsearch.client.getSchema(); should(schema).be.eql({ nepali: ["mehry"], istanbul: [], @@ -4915,22 +5119,24 @@ describe("Test: ElasticSearch service", () => { const hiddenAlias = `@${hiddenIndice}`; beforeEach(() => { - 
elasticsearch._client.cat.aliases.resolves([]); + elasticsearch.client._client.cat.aliases.resolves([]); - sinon.stub(elasticsearch, "_getAvailableIndice").resolves(hiddenIndice); - sinon.stub(elasticsearch, "_getWaitForActiveShards").returns(1); + sinon + .stub(elasticsearch.client, "_getAvailableIndice") + .resolves(hiddenIndice); + sinon.stub(elasticsearch.client, "_getWaitForActiveShards").returns(1); }); afterEach(() => { - elasticsearch._getAvailableIndice.restore(); + elasticsearch.client._getAvailableIndice.restore(); }); it("creates the hidden collection", async () => { - elasticsearch._client.indices.create.resolves({}); + elasticsearch.client._client.indices.create.resolves({}); - await elasticsearch._createHiddenCollection("nisantasi"); + await elasticsearch.client._createHiddenCollection("nisantasi"); - should(elasticsearch._client.indices.create).be.calledWithMatch({ + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ index: hiddenIndice, aliases: { [hiddenAlias]: {} }, settings: { @@ -4943,23 +5149,25 @@ describe("Test: ElasticSearch service", () => { }); it("does not create the hidden collection if it already exists", async () => { - elasticsearch._client.cat.aliases.resolves([{ alias: hiddenAlias }]); + elasticsearch.client._client.cat.aliases.resolves([ + { alias: hiddenAlias }, + ]); - await elasticsearch._createHiddenCollection("nisantasi"); + await elasticsearch.client._createHiddenCollection("nisantasi"); - should(elasticsearch._client.indices.create).not.be.called(); + should(elasticsearch.client._client.indices.create).not.be.called(); }); it("does create hidden collection based on global settings", async () => { - elasticsearch._client.indices.create.resolves({}); + elasticsearch.client._client.indices.create.resolves({}); elasticsearch.config.defaultSettings = { number_of_shards: 42, number_of_replicas: 42, }; - await elasticsearch._createHiddenCollection("nisantasi"); + await elasticsearch.client._createHiddenCollection("nisantasi"); - should(elasticsearch._client.indices.create).be.calledWithMatch({ + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ index: hiddenIndice, aliases: { [hiddenAlias]: {} }, settings: { @@ -4972,11 +5180,13 @@ describe("Test: ElasticSearch service", () => { }); it("should wait for all shards to being active when using an Elasticsearch cluster", async () => { - elasticsearch._client.indices.create.resolves({}); - elasticsearch._getWaitForActiveShards = sinon.stub().returns("all"); - await elasticsearch._createHiddenCollection("nisantasi"); + elasticsearch.client._client.indices.create.resolves({}); + elasticsearch.client._getWaitForActiveShards = sinon + .stub() + .returns("all"); + await elasticsearch.client._createHiddenCollection("nisantasi"); - should(elasticsearch._client.indices.create).be.calledWithMatch({ + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ index: hiddenIndice, aliases: { [hiddenAlias]: {} }, settings: { @@ -4988,11 +5198,11 @@ describe("Test: ElasticSearch service", () => { }); it("should wait for only one shard to being active when using a single node Elasticsearch cluster", async () => { - elasticsearch._client.indices.create.resolves({}); - elasticsearch._getWaitForActiveShards = sinon.stub().returns(1); - await elasticsearch._createHiddenCollection("nisantasi"); + elasticsearch.client._client.indices.create.resolves({}); + elasticsearch.client._getWaitForActiveShards = sinon.stub().returns(1); + await 
elasticsearch.client._createHiddenCollection("nisantasi"); - should(elasticsearch._client.indices.create).be.calledWithMatch({ + should(elasticsearch.client._client.indices.create).be.calledWithMatch({ index: hiddenIndice, aliases: { [hiddenAlias]: {} }, settings: { @@ -5016,13 +5226,13 @@ describe("Test: ElasticSearch service", () => { }; global.NODE_ENV = "development"; - should(() => elasticsearch._checkMappings(mapping)).throw({ + should(() => elasticsearch.client._checkMappings(mapping)).throw({ message: 'Invalid mapping property "mappings.dinamic". Did you mean "dynamic"?', id: "services.storage.invalid_mapping", }); - should(() => elasticsearch._checkMappings(mapping2)).throw({ + should(() => elasticsearch.client._checkMappings(mapping2)).throw({ message: 'Invalid mapping property "mappings.type".', id: "services.storage.invalid_mapping", }); @@ -5043,14 +5253,14 @@ describe("Test: ElasticSearch service", () => { }; global.NODE_ENV = "development"; - should(() => elasticsearch._checkMappings(mapping)).throw({ + should(() => elasticsearch.client._checkMappings(mapping)).throw({ message: 'Invalid mapping property "mappings.properties.car.dinamic". Did you mean "dynamic"?', id: "services.storage.invalid_mapping", }); global.NODE_ENV = "production"; - should(() => elasticsearch._checkMappings(mapping)).throw({ + should(() => elasticsearch.client._checkMappings(mapping)).throw({ message: 'Invalid mapping property "mappings.properties.car.dinamic".', id: "services.storage.invalid_mapping", }); @@ -5072,7 +5282,7 @@ describe("Test: ElasticSearch service", () => { }, }; - should(() => elasticsearch._checkMappings(mapping)).not.throw(); + should(() => elasticsearch.client._checkMappings(mapping)).not.throw(); }); }); @@ -5087,14 +5297,19 @@ describe("Test: ElasticSearch service", () => { scopeEnum.PRIVATE, ); + sinon.stub(publicES.client, "waitForElasticsearch").resolves(); + sinon.stub(internalES.client, "waitForElasticsearch").resolves(); + publicES.client._client = new ESClientMock("8.0.0"); + internalES.client._client = new ESClientMock("8.0.0"); + await publicES.init(); await internalES.init(); }); describe("#_getAlias", () => { it("return alias name for a collection", () => { - const publicAlias = publicES._getAlias("nepali", "liia"); - const internalAlias = internalES._getAlias("nepali", "mehry"); + const publicAlias = publicES.client._getAlias("nepali", "liia"); + const internalAlias = internalES.client._getAlias("nepali", "mehry"); should(publicAlias).be.eql("@&nepali.liia"); should(internalAlias).be.eql("@%nepali.mehry"); @@ -5112,25 +5327,32 @@ describe("Test: ElasticSearch service", () => { privateBody = [ { alias: "@%nepali.mehry", index: "%nepalu.mehry", filter: 0 }, ]; - publicES._client.cat.aliases.resolves(publicBody); - internalES._client.cat.aliases.resolves(privateBody); + publicES.client._client.cat.aliases.resolves(publicBody); + internalES.client._client.cat.aliases.resolves(privateBody); - const publicIndice = await publicES._getIndice("nepali", "liia"); - const internalIndice = await internalES._getIndice("nepali", "mehry"); + const publicIndice = await publicES.client._getIndice("nepali", "liia"); + const internalIndice = await internalES.client._getIndice( + "nepali", + "mehry", + ); should(publicIndice).be.eql("&nepali.lia"); should(internalIndice).be.eql("%nepalu.mehry"); }); it("throw if there is no indice associated with the alias", async () => { - publicES._client.cat.aliases.resolves([]); - internalES._client.cat.aliases.resolves([]); + 
publicES.client._client.cat.aliases.resolves([]); + internalES.client._client.cat.aliases.resolves([]); - await should(publicES._getIndice("nepali", "liia")).be.rejectedWith({ + await should( + publicES.client._getIndice("nepali", "liia"), + ).be.rejectedWith({ id: "services.storage.unknown_index_collection", }); - await should(internalES._getIndice("nepali", "mehry")).be.rejectedWith({ + await should( + internalES.client._getIndice("nepali", "mehry"), + ).be.rejectedWith({ id: "services.storage.unknown_index_collection", }); }); @@ -5144,14 +5366,18 @@ describe("Test: ElasticSearch service", () => { { alias: "@%nepali.mehry", index: "%nepalu.mehry", filter: 0 }, { alias: "@%nepali.mehry", index: "%nepali.mehry", filter: 0 }, ]; - publicES._client.cat.aliases.resolves(publicBody); - internalES._client.cat.aliases.resolves(privateBody); + publicES.client._client.cat.aliases.resolves(publicBody); + internalES.client._client.cat.aliases.resolves(privateBody); - await should(publicES._getIndice("nepali", "liia")).be.rejectedWith({ + await should( + publicES.client._getIndice("nepali", "liia"), + ).be.rejectedWith({ id: "services.storage.multiple_indice_alias", }); - await should(internalES._getIndice("nepali", "mehry")).be.rejectedWith({ + await should( + internalES.client._getIndice("nepali", "mehry"), + ).be.rejectedWith({ id: "services.storage.multiple_indice_alias", }); }); @@ -5159,14 +5385,14 @@ describe("Test: ElasticSearch service", () => { describe("#_getAvailableIndice", () => { it("return simple indice whenever it is possible", async () => { - publicES._client.indices.exists.resolves(false); - internalES._client.indices.exists.resolves(false); + publicES.client._client.indices.exists.resolves(false); + internalES.client._client.indices.exists.resolves(false); - const publicIndice = await publicES._getAvailableIndice( + const publicIndice = await publicES.client._getAvailableIndice( "nepali", "liia", ); - const internalIndice = await internalES._getAvailableIndice( + const internalIndice = await internalES.client._getAvailableIndice( "nepali", "_kuzzle_keep", ); @@ -5176,27 +5402,26 @@ describe("Test: ElasticSearch service", () => { }); it("return a suffixed indice if necessary (indice already taken)", async () => { - publicES._client.indices.exists + publicES.client._client.indices.exists .onFirstCall() .resolves(true) .resolves(false); - internalES._client.indices.exists + internalES.client._client.indices.exists .onFirstCall() .resolves(true) .resolves(false); - const publicIndice = await publicES._getAvailableIndice( + const publicIndice = await publicES.client._getAvailableIndice( "nepali", "liia", ); - const internalIndice = await internalES._getAvailableIndice( + const internalIndice = await internalES.client._getAvailableIndice( "nepali", "mehry", ); - // Random suffix = 100000 because randomNumber has been mocked - should(publicIndice).match("&nepali.liia.100000"); - should(internalIndice).match("%nepali.mehry.100000"); + should(publicIndice).match(new RegExp("&nepali.liia\\.\\d+")); + should(internalIndice).match(new RegExp("%nepali.mehry\\.\\d+")); }); it("return a truncated and suffixed indice if necessary (indice + suffix too long)", async () => { @@ -5204,31 +5429,42 @@ describe("Test: ElasticSearch service", () => { "averyveryverylongindexwhichhasexactlythemaximumlengthacceptedofonehundredandtwentysixcharactersandthatiswaytoolongdontyouthink"; const longCollection = 
"averyverylongcollectionwhichhasexactlythemaximumlengthacceptedofonehundredandtwentysixcharactersandthatswaytoolongdontyouthink"; - publicES._client.indices.exists + publicES.client._client.indices.exists .onFirstCall() .resolves(true) .resolves(false); - internalES._client.indices.exists + internalES.client._client.indices.exists .onFirstCall() .resolves(true) .resolves(false); - const publicIndice = await publicES._getAvailableIndice( + const publicIndice = await publicES.client._getAvailableIndice( longIndex, longCollection, ); - const internalIndice = await internalES._getAvailableIndice( + const internalIndice = await internalES.client._getAvailableIndice( longIndex, longCollection, ); - // Random suffix = 100000 because randomNumber has been mocked + const publicIndiceCaptureSuffix = new RegExp(`(\\d+)`).exec( + publicIndice, + )[0].length; + const internalIndiceCaptureSuffix = new RegExp(`(\\d+)`).exec( + internalIndice, + )[0].length; + should(publicIndice).match( - `&${longIndex}.${longCollection.substr(0, 120)}.100000`, + new RegExp( + `&${longIndex}.${longCollection.substr(0, longCollection.length - publicIndiceCaptureSuffix)}\\.\\d+`, + ), ); should(internalIndice).match( - `%${longIndex}.${longCollection.substr(0, 120)}.100000`, + new RegExp( + `%${longIndex}.${longCollection.substr(0, longCollection.length - internalIndiceCaptureSuffix)}\\.\\d+`, + ), ); + // The indice should be truncated just enough, not more not less should(publicIndice).match( (value) => Buffer.from(value).length === 255, @@ -5258,12 +5494,13 @@ describe("Test: ElasticSearch service", () => { }, }, }; - publicES._client.indices.getAlias.resolves({ body: publicBody }); - internalES._client.indices.getAlias.resolves({ body: privateBody }); + publicES.client._client.indices.getAlias.resolves(publicBody); + internalES.client._client.indices.getAlias.resolves(privateBody); - const publicIndice = await publicES._getAliasFromIndice("&nepali.lia"); + const publicIndice = + await publicES.client._getAliasFromIndice("&nepali.lia"); const internalIndice = - await internalES._getAliasFromIndice("%nepalu.mehry"); + await internalES.client._getAliasFromIndice("%nepalu.mehry"); should(publicIndice).be.eql(["@&nepali.liia"]); should(internalIndice).be.eql(["@%nepali.mehry"]); @@ -5280,15 +5517,15 @@ describe("Test: ElasticSearch service", () => { aliases: {}, }, }; - publicES._client.indices.getAlias.resolves({ body: publicBody }); - internalES._client.indices.getAlias.resolves({ body: privateBody }); + publicES.client._client.indices.getAlias.resolves(publicBody); + internalES.client._client.indices.getAlias.resolves(privateBody); await should( - publicES._getAliasFromIndice("&nepali.lia"), + publicES.client._getAliasFromIndice("&nepali.lia"), ).be.rejectedWith({ id: "services.storage.unknown_index_collection" }); await should( - internalES._getAliasFromIndice("%nepalu.mehry"), + internalES.client._getAliasFromIndice("%nepalu.mehry"), ).be.rejectedWith({ id: "services.storage.unknown_index_collection" }); }); @@ -5309,15 +5546,15 @@ describe("Test: ElasticSearch service", () => { }, }, }; - publicES._client.indices.getAlias.resolves({ body: publicBody }); - internalES._client.indices.getAlias.resolves({ body: privateBody }); + publicES.client._client.indices.getAlias.resolves(publicBody); + internalES.client._client.indices.getAlias.resolves(privateBody); await should( - publicES._getAliasFromIndice("&nepali.lia"), + publicES.client._getAliasFromIndice("&nepali.lia"), ).not.be.rejectedWith({ id: 
"services.storage.multiple_indice_alias" }); await should( - internalES._getAliasFromIndice("%nepalu.mehry"), + internalES.client._getAliasFromIndice("%nepalu.mehry"), ).not.be.rejectedWith({ id: "services.storage.multiple_indice_alias" }); }); @@ -5338,36 +5575,38 @@ describe("Test: ElasticSearch service", () => { }, }, }; - publicES._client.indices.getAlias.resolves({ body: publicBody }); - internalES._client.indices.getAlias.resolves({ body: privateBody }); + publicES.client._client.indices.getAlias.resolves(publicBody); + internalES.client._client.indices.getAlias.resolves(privateBody); await should( - publicES._getAliasFromIndice("&nepali.lia"), + publicES.client._getAliasFromIndice("&nepali.lia"), ).not.be.rejectedWith({ id: "services.storage.multiple_indice_alias" }); await should( - internalES._getAliasFromIndice("%nepalu.mehry"), + internalES.client._getAliasFromIndice("%nepalu.mehry"), ).not.be.rejectedWith({ id: "services.storage.multiple_indice_alias" }); }); }); describe("#_getWaitForActiveShards", () => { it("should return all if an Elasticsearch cluster is used", async () => { - elasticsearch._client.cat.nodes = sinon + elasticsearch.client._client.cat.nodes = sinon .stub() .resolves(["node1", "node2"]); const waitForActiveShards = - await elasticsearch._getWaitForActiveShards(); + await elasticsearch.client._getWaitForActiveShards(); should(waitForActiveShards).be.eql("all"); }); it("should return 1 if a single node Elasticsearch cluster is used", async () => { - elasticsearch._client.cat.nodes = sinon.stub().resolves(["node1"]); + elasticsearch.client._client.cat.nodes = sinon + .stub() + .resolves(["node1"]); const waitForActiveShards = - await elasticsearch._getWaitForActiveShards(); + await elasticsearch.client._getWaitForActiveShards(); should(waitForActiveShards).be.eql(1); }); @@ -5390,33 +5629,33 @@ describe("Test: ElasticSearch service", () => { ]; beforeEach(() => { - publicES._client.indices.updateAliases.resolves(); - internalES._client.indices.updateAliases.resolves(); + publicES.client._client.indices.updateAliases.resolves(); + internalES.client._client.indices.updateAliases.resolves(); - publicES._client.cat.indices.resolves(indicesBody); - internalES._client.cat.indices.resolves(indicesBody); + publicES.client._client.cat.indices.resolves(indicesBody); + internalES.client._client.cat.indices.resolves(indicesBody); - sinon.stub(publicES, "listAliases").resolves(aliasesList); - sinon.stub(internalES, "listAliases").resolves(aliasesList); + sinon.stub(publicES.client, "listAliases").resolves(aliasesList); + sinon.stub(internalES.client, "listAliases").resolves(aliasesList); }); afterEach(() => { - publicES.listAliases.restore(); - internalES.listAliases.restore(); + publicES.client.listAliases.restore(); + internalES.client.listAliases.restore(); }); it("Find indices without associated aliases and create some accordingly", async () => { - await publicES.generateMissingAliases(); - await internalES.generateMissingAliases(); + await publicES.client.generateMissingAliases(); + await internalES.client.generateMissingAliases(); - should(publicES._client.indices.updateAliases).be.calledWith({ + should(publicES.client._client.indices.updateAliases).be.calledWith({ body: { actions: [ { add: { alias: "@&nepali.mehry", index: "&nepali.mehry" } }, ], }, }); - should(internalES._client.indices.updateAliases).be.calledWith({ + should(internalES.client._client.indices.updateAliases).be.calledWith({ body: { actions: [ { add: { alias: "@%nepali.liia", index: "%nepali.liia" } 
}, @@ -5454,21 +5693,21 @@ describe("Test: ElasticSearch service", () => { }, ]; - publicES.listAliases.resolves(aliasesList); - internalES.listAliases.resolves(aliasesList); + publicES.client.listAliases.resolves(aliasesList); + internalES.client.listAliases.resolves(aliasesList); - await publicES.generateMissingAliases(); - await internalES.generateMissingAliases(); + await publicES.client.generateMissingAliases(); + await internalES.client.generateMissingAliases(); - should(publicES._client.indices.updateAliases).not.be.called(); - should(internalES._client.indices.updateAliases).not.be.called(); + should(publicES.client._client.indices.updateAliases).not.be.called(); + should(internalES.client._client.indices.updateAliases).not.be.called(); }); }); describe("#_extractIndex", () => { it("extract the index from alias", () => { - const publicIndex = publicES._extractIndex("@&nepali.liia"); - const internalIndex = internalES._extractIndex("@%nepali.liia"); + const publicIndex = publicES.client._extractIndex("@&nepali.liia"); + const internalIndex = internalES.client._extractIndex("@%nepali.liia"); should(publicIndex).be.eql("nepali"); should(internalIndex).be.eql("nepali"); @@ -5477,15 +5716,17 @@ describe("Test: ElasticSearch service", () => { describe("#_extractCollection", () => { it("extract the collection from alias", () => { - const publicCollection = publicES._extractCollection("@&nepali.liia"); + const publicCollection = + publicES.client._extractCollection("@&nepali.liia"); const publicCollection2 = - publicES._extractCollection("@&vietnam.lfiduras"); - const publicCollection3 = publicES._extractCollection("@&vietnam.l"); - const publicCollection4 = publicES._extractCollection( + publicES.client._extractCollection("@&vietnam.lfiduras"); + const publicCollection3 = + publicES.client._extractCollection("@&vietnam.l"); + const publicCollection4 = publicES.client._extractCollection( "@&vietnam.iamaverylongcollectionnamebecauseiworthit", ); const internalCollection = - internalES._extractCollection("@%nepali.liia"); + internalES.client._extractCollection("@%nepali.liia"); should(publicCollection).be.eql("liia"); should(publicCollection2).be.eql("lfiduras"); @@ -5509,8 +5750,8 @@ describe("Test: ElasticSearch service", () => { "@&vietnam._kuzzle_keep", ]; - const publicSchema = publicES._extractSchema(aliases); - const internalSchema = internalES._extractSchema(aliases); + const publicSchema = publicES.client._extractSchema(aliases); + const internalSchema = internalES.client._extractSchema(aliases); should(internalSchema).be.eql({ nepali: ["liia", "mehry"], @@ -5532,10 +5773,10 @@ describe("Test: ElasticSearch service", () => { "@&vietnam._kuzzle_keep", ]; - const publicSchema = publicES._extractSchema(aliases, { + const publicSchema = publicES.client._extractSchema(aliases, { includeHidden: true, }); - const internalSchema = internalES._extractSchema(aliases, { + const internalSchema = internalES.client._extractSchema(aliases, { includeHidden: true, }); @@ -5554,11 +5795,11 @@ describe("Test: ElasticSearch service", () => { it("should return the same query if all top level keywords are valid", () => { searchBody = {}; - for (const key of publicES.searchBodyKeys) { + for (const key of publicES.client.searchBodyKeys) { searchBody[key] = { foo: "bar" }; } - const result = publicES._sanitizeSearchBody( + const result = publicES.client._sanitizeSearchBody( Object.assign({}, searchBody), ); @@ -5570,7 +5811,7 @@ describe("Test: ElasticSearch service", () => { unknown: {}, }; - should(() => 
publicES._sanitizeSearchBody(searchBody)).throw( + should(() => publicES.client._sanitizeSearchBody(searchBody)).throw( BadRequestError, { id: "services.storage.invalid_search_query" }, ); @@ -5597,7 +5838,7 @@ describe("Test: ElasticSearch service", () => { }, }; - should(() => publicES._sanitizeSearchBody(searchBody)).throw( + should(() => publicES.client._sanitizeSearchBody(searchBody)).throw( BadRequestError, { id: "services.storage.invalid_query_keyword" }, ); @@ -5608,7 +5849,7 @@ describe("Test: ElasticSearch service", () => { query: {}, }; - const result = publicES._sanitizeSearchBody(searchBody); + const result = publicES.client._sanitizeSearchBody(searchBody); should(result).be.deepEqual({ query: { match_all: {} } }); }); @@ -5629,13 +5870,13 @@ describe("Test: ElasticSearch service", () => { }, }; - should(() => publicES._scriptCheck(searchParams)).not.throw(); + should(() => publicES.client._scriptCheck(searchParams)).not.throw(); }); it("should not throw when there is not a single script", () => { const searchParams = { foo: "bar" }; - should(() => publicES._scriptCheck(searchParams)).not.throw(); + should(() => publicES.client._scriptCheck(searchParams)).not.throw(); }); it("should throw if any script is found in the query", () => { @@ -5652,7 +5893,7 @@ describe("Test: ElasticSearch service", () => { }, }; - should(() => publicES._sanitizeSearchBody(searchParams)).throw( + should(() => publicES.client._sanitizeSearchBody(searchParams)).throw( BadRequestError, { id: "services.storage.invalid_query_keyword" }, ); @@ -5670,7 +5911,7 @@ describe("Test: ElasticSearch service", () => { }, }; - should(() => publicES._sanitizeSearchBody(searchParams)).throw( + should(() => publicES.client._sanitizeSearchBody(searchParams)).throw( BadRequestError, { id: "services.storage.invalid_query_keyword" }, ); @@ -5697,7 +5938,7 @@ describe("Test: ElasticSearch service", () => { }, }; - should(() => publicES._sanitizeSearchBody(searchParams)).throw( + should(() => publicES.client._sanitizeSearchBody(searchParams)).throw( BadRequestError, { id: "services.storage.invalid_query_keyword" }, ); diff --git a/test/service/storage/esWrapper.test.js b/test/service/storage/esWrapper-es7.test.js similarity index 98% rename from test/service/storage/esWrapper.test.js rename to test/service/storage/esWrapper-es7.test.js index 11662bbbd4..3e5189d562 100644 --- a/test/service/storage/esWrapper.test.js +++ b/test/service/storage/esWrapper-es7.test.js @@ -6,7 +6,7 @@ const { ExternalServiceError } = require("../../../index"); const ESClientMock = require("../../mocks/service/elasticsearchClient.mock"); const KuzzleMock = require("../../mocks/kuzzle.mock"); -const ESWrapper = require("../../../lib/service/storage/esWrapper"); +const ESWrapper = require("../../../lib/service/storage/7/esWrapper"); describe("Test: ElasticSearch Wrapper", () => { let kuzzle; diff --git a/test/service/storage/esWrapper-es8.test.js b/test/service/storage/esWrapper-es8.test.js new file mode 100644 index 0000000000..8fdfca1dec --- /dev/null +++ b/test/service/storage/esWrapper-es8.test.js @@ -0,0 +1,178 @@ +"use strict"; + +const should = require("should"); + +const { ExternalServiceError } = require("../../../index"); +const ESClientMock = require("../../mocks/service/elasticsearchClient.mock"); +const KuzzleMock = require("../../mocks/kuzzle.mock"); + +const ESWrapper = require("../../../lib/service/storage/8/esWrapper"); + +describe("Test: ElasticSearch Wrapper", () => { + let kuzzle; + const client = new ESClientMock(); + const 
esWrapper = new ESWrapper(client); + + beforeEach(() => { + kuzzle = new KuzzleMock(); + }); + + describe("#formatESError", () => { + it("should convert any unknown error to a ExternalServiceError instance", () => { + const error = new Error("test"); + error.meta = { + statusCode: 420, + }; + + const formatted = esWrapper.formatESError(error); + + should(formatted).be.instanceOf(ExternalServiceError); + should(formatted.id).be.eql("services.storage.unexpected_error"); + }); + + it("should handle version conflict errors", () => { + const error = new Error( + '[version_conflict_engine_exception] [data][AVrbg0eg90VMe4Z_dG8j]: version conflict, current version [153] is different than the one provided [152], with { index_uuid="iDrU6CfZSO6CghM1t6dl0A" & shard="2" & index="userglobaldata" }', + ); + error.meta = { + statusCode: 409, + }; + + const formatted = esWrapper.formatESError(error); + + should(formatted).be.instanceOf(ExternalServiceError); + should(formatted.id).be.eql("services.storage.too_many_changes"); + }); + + it("should handle already existing document", () => { + const error = new Error(""); + error.meta = { + body: { + error: { + reason: + "[liia]: version conflict, document already exists (current version [1])", + }, + }, + }; + + const formatted = esWrapper.formatESError(error); + + should(formatted).be.match({ + id: "services.storage.document_already_exists", + }); + }); + + it("should handle document not found", () => { + const error = new Error("test"); + error.meta = { statusCode: 404 }; + error.body = { + _index: "&nyc-open-data.yellow-taxi", + found: false, + _id: "mehry", + error: { + reason: "foo", + "resource.id": "bar", + }, + }; + + const formatted = esWrapper.formatESError(error); + + should(formatted).be.match({ + message: 'Document "mehry" not found in "nyc-open-data":"yellow-taxi".', + id: "services.storage.not_found", + }); + }); + + it("should handle unexpected not found", () => { + const error = new Error("test"); + error.meta = { statusCode: 404 }; + error.body = { + found: false, + _id: "mehry", + error: { + reason: "foo", + "resource.id": "bar", + }, + }; + + const formatted = esWrapper.formatESError(error); + + should(formatted).be.match({ + message: "test", + id: "services.storage.unexpected_not_found", + }); + }); + + it("should handle unknown DSL keyword", () => { + const error = new Error(""); + error.meta = { + body: { + error: { + reason: "[and] query malformed, no start_object after query name", + }, + }, + }; + + should(esWrapper.formatESError(error)).be.match({ + id: "services.storage.unknown_query_keyword", + }); + + error.meta = { + body: { error: { reason: "no [query] registered for [equals]" } }, + }; + + should(esWrapper.formatESError(error)).be.match({ + id: "services.storage.unknown_query_keyword", + }); + }); + + describe("logging in production", () => { + let nodeEnv; + + beforeEach(() => { + nodeEnv = global.NODE_ENV; + global.NODE_ENV = "production"; + }); + + afterEach(() => { + global.NODE_ENV = nodeEnv; + }); + + it("should emit the source error for easier support & debugging", () => { + kuzzle.emit.resetHistory(); + + const error = new Error("test"); + error.meta = { + statusCode: 420, + meta: { + request: { + oh: "noes", + }, + }, + }; + + esWrapper.formatESError(error); + + should(kuzzle.emit).be.calledWith("services:storage:error", { + message: `Elasticsearch Client error: ${error.message}`, + meta: error.meta, + stack: error.stack, + }); + }); + + it("should be able to log errors without meta", () => { + 
kuzzle.emit.resetHistory(); + + const error = new Error("test"); + + esWrapper.formatESError(error); + + should(kuzzle.emit).be.calledWith("services:storage:error", { + message: `Elasticsearch Client error: ${error.message}`, + meta: null, + stack: error.stack, + }); + }); + }); + }); +}); diff --git a/test/service/storage/queryTranslator.test.js b/test/service/storage/queryTranslator.test.js index aa1b414c82..1590401fcb 100644 --- a/test/service/storage/queryTranslator.test.js +++ b/test/service/storage/queryTranslator.test.js @@ -2,7 +2,7 @@ const should = require("should"); -const QueryTranslator = require("../../../lib/service/storage/queryTranslator"); +const QueryTranslator = require("../../../lib/service/storage/commons/queryTranslator"); describe("QueryTranslator", () => { const translator = new QueryTranslator(); From b03ec88851297223cca2302e9b774ca7dffca6e0 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 27 Feb 2024 09:58:03 +0100 Subject: [PATCH 20/59] style(elasticsearch 8): linting the application --- lib/service/storage/7/elasticsearch.ts | 148 ++++++++++++------------- lib/service/storage/8/elasticsearch.ts | 5 +- lib/service/storage/Elasticsearch.ts | 15 --- 3 files changed, 78 insertions(+), 90 deletions(-) diff --git a/lib/service/storage/7/elasticsearch.ts b/lib/service/storage/7/elasticsearch.ts index d13ba7eb86..f21089b917 100644 --- a/lib/service/storage/7/elasticsearch.ts +++ b/lib/service/storage/7/elasticsearch.ts @@ -169,7 +169,7 @@ export class ES7 { "Your dynamic mapping policy is set to 'true' for new fields.", "Elasticsearch will try to automatically infer mapping for new fields, and those cannot be changed afterward.", 'See the "services.storageEngine.commonMapping.dynamic" option in the kuzzlerc configuration file to change this value.', - ].join("\n") + ].join("\n"), ); } this._client = new Client(this._config.client); @@ -190,7 +190,7 @@ export class ES7 { "services", "storage", "version_mismatch", - version.number + version.number, ); } @@ -329,14 +329,14 @@ export class ES7 { "services", "storage", "scroll_duration_too_great", - _scrollTTL + _scrollTTL, ); } } const stringifiedScrollInfo = await global.kuzzle.ask( "core:cache:internal:get", - cacheKey + cacheKey, ); if (!stringifiedScrollInfo) { @@ -361,7 +361,7 @@ export class ES7 { JSON.stringify(scrollInfo), { ttl: ms(_scrollTTL) || this.scrollTTL, - } + }, ); } @@ -403,7 +403,7 @@ export class ES7 { from?: number; size?: number; scroll?: string; - } = {} + } = {}, ) { let esIndexes: any; @@ -439,7 +439,7 @@ export class ES7 { "services", "storage", "scroll_duration_too_great", - scroll + scroll, ); } } @@ -463,7 +463,7 @@ export class ES7 { index, targets, }), - { ttl } + { ttl }, ); body.remaining = body.hits.total.value - body.hits.hits.length; @@ -563,7 +563,7 @@ export class ES7 { for (const [name, innerHit] of Object.entries(innerHits)) { formattedInnerHits[name] = await Bluebird.map( (innerHit as any).hits.hits, - formatHit + formatHit, ); } return formattedInnerHits; @@ -724,7 +724,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { assertIsObject(content); @@ -788,7 +788,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest = { body: content, @@ -852,7 +852,7 @@ export class ES7 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest: RequestParams.Update> = { _source: "true", @@ 
-918,7 +918,7 @@ export class ES7 { userId?: string; retryOnConflict?: number; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const esRequest: RequestParams.Update> = { _source: "true", @@ -991,7 +991,7 @@ export class ES7 { refresh?: boolean | "wait_for"; userId?: string; injectKuzzleMeta?: boolean; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -1024,7 +1024,7 @@ export class ES7 { "not_found", id, index, - collection + collection, ); } @@ -1060,7 +1060,7 @@ export class ES7 { refresh, }: { refresh?: boolean | "wait_for"; - } = {} + } = {}, ) { const esRequest = { id, @@ -1108,7 +1108,7 @@ export class ES7 { refresh?: boolean | "wait_for"; size?: number; fetch?: boolean; - } = {} + } = {}, ) { const esRequest: RequestParams.DeleteByQuery> = { body: this._sanitizeSearchBody({ query }), @@ -1170,7 +1170,7 @@ export class ES7 { }: { refresh?: boolean | "wait_for"; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -1240,7 +1240,7 @@ export class ES7 { refresh?: boolean | "wait_for"; size?: number; userId?: string; - } = {} + } = {}, ) { try { const esRequest = { @@ -1263,7 +1263,7 @@ export class ES7 { index, collection, documents, - { refresh, userId } + { refresh, userId }, ); return { @@ -1295,7 +1295,7 @@ export class ES7 { refresh = false, }: { refresh?: boolean; - } = {} + } = {}, ) { const script = { params: {}, @@ -1339,7 +1339,7 @@ export class ES7 { "storage", "incomplete_update", response.body.updated, - errors + errors, ); } @@ -1371,7 +1371,7 @@ export class ES7 { }: { size?: number; scrollTTl?: string; - } = {} + } = {}, ): Promise { const esRequest: RequestParams.Search = { body: this._sanitizeSearchBody({ query }), @@ -1397,7 +1397,7 @@ export class ES7 { esRequest, async function getMoreUntilDone( error, - { body: { hits, _scroll_id } } + { body: { hits, _scroll_id } }, ) { if (error) { reject(error); @@ -1417,12 +1417,12 @@ export class ES7 { scroll: esRequest.scroll, scroll_id: _scroll_id, }, - getMoreUntilDone + getMoreUntilDone, ); } else { resolve(results); } - } + }, ); }); } finally { @@ -1470,7 +1470,7 @@ export class ES7 { "storage", "index_already_exists", indexType, - index + index, ); } } @@ -1496,7 +1496,7 @@ export class ES7 { { mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {} + }: { mappings?: TypeMapping; settings?: Record } = {}, ) { this._assertValidIndexAndCollection(index, collection); @@ -1505,7 +1505,7 @@ export class ES7 { "services", "storage", "collection_reserved", - HIDDEN_COLLECTION + HIDDEN_COLLECTION, ); } @@ -1548,7 +1548,7 @@ export class ES7 { dynamic: mappings.dynamic || this._config.commonMapping.dynamic, properties: _.merge( mappings.properties, - this._config.commonMapping.properties + this._config.commonMapping.properties, ), }; @@ -1619,7 +1619,7 @@ export class ES7 { includeKuzzleMeta = false, }: { includeKuzzleMeta?: boolean; - } = {} + } = {}, ) { const indice = await this._getIndice(index, collection); const esRequest = { @@ -1660,7 +1660,7 @@ export class ES7 { { mappings = {}, settings = {}, - }: { mappings?: TypeMapping; settings?: Record } = {} + }: { mappings?: TypeMapping; settings?: Record } = {}, ) { const esRequest = { index: await this._getIndice(index, collection), @@ -1766,7 +1766,7 @@ export class ES7 { async updateMapping( index: string, collection: string, - mappings: TypeMapping = {} + mappings: TypeMapping = {}, ): Promise<{ dynamic: string; _meta: JSONObject; 
properties: JSONObject }> { const esRequest: RequestParams.IndicesPutMapping> = { body: {}, @@ -1797,7 +1797,7 @@ export class ES7 { const fullProperties = _.merge( collectionMappings.properties, - mappings.properties + mappings.properties, ); return { @@ -1901,7 +1901,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const dateNow = Date.now(); @@ -2063,7 +2063,7 @@ export class ES7 { for (const [index, collections] of Object.entries(schema)) { schema[index] = (collections as string[]).filter( - (c) => c !== HIDDEN_COLLECTION + (c) => c !== HIDDEN_COLLECTION, ); } @@ -2164,7 +2164,7 @@ export class ES7 { return request; }, - { index: [] } + { index: [] }, ); if (esRequest.index.length === 0) { @@ -2235,7 +2235,7 @@ export class ES7 { async exists( index: string, collection: string, - id: string + id: string, ): Promise { const esRequest: RequestParams.Exists = { id, @@ -2367,7 +2367,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2465,7 +2465,7 @@ export class ES7 { injectKuzzleMeta = true, limits = true, source = true, - }: KRequestParams = {} + }: KRequestParams = {}, ) { let kuzzleMeta = {}; @@ -2489,7 +2489,7 @@ export class ES7 { }; const { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta + kuzzleMeta, ); esRequest.body = []; @@ -2537,7 +2537,7 @@ export class ES7 { retryOnConflict = 0, timeout = undefined, userId = null, - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), toImport = [], @@ -2555,7 +2555,7 @@ export class ES7 { }, { rejected, extractedDocuments } = this._extractMDocuments( documents, - kuzzleMeta + kuzzleMeta, ); /** @@ -2638,7 +2638,7 @@ export class ES7 { retryOnConflict?: number; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection); const esRequest = { @@ -2670,7 +2670,7 @@ export class ES7 { { prepareMUpsert: true, requireId: true, - } + }, ); /** @@ -2692,7 +2692,7 @@ export class ES7 { { doc: extractedDocuments[i]._source.changes, upsert: extractedDocuments[i]._source.default, - } + }, ); // _source: true // Makes ES return the updated document source in the response. 
@@ -2703,7 +2703,7 @@ export class ES7 { const response = await this._mExecute( esRequest, extractedDocuments, - rejected + rejected, ); // with _source: true, ES returns the updated document in @@ -2745,7 +2745,7 @@ export class ES7 { refresh?: boolean | "wait_for"; timeout?: string; userId?: string; - } = {} + } = {}, ) { const alias = this._getAlias(index, collection), kuzzleMeta = { @@ -2835,7 +2835,7 @@ export class ES7 { }: { refresh?: boolean | "wait_for"; timeout?: number; - } = {} + } = {}, ) { const query = { ids: { values: [] } }; const validIds = []; @@ -2913,7 +2913,7 @@ export class ES7 { esRequest: RequestParams.Bulk, documents: JSONObject[], partialErrors: JSONObject[] = [], - { limits = true, source = true } = {} + { limits = true, source = true } = {}, ) { assertWellFormedRefresh(esRequest); @@ -2995,7 +2995,7 @@ export class ES7 { _extractMDocuments( documents: JSONObject[], metadata: JSONObject, - { prepareMGet = false, requireId = false, prepareMUpsert = false } = {} + { prepareMGet = false, requireId = false, prepareMUpsert = false } = {}, ) { const rejected = []; const extractedDocuments = []; @@ -3044,7 +3044,7 @@ export class ES7 { metadata, document, extractedDocuments, - documentsToGet + documentsToGet, ); } } @@ -3066,7 +3066,7 @@ export class ES7 { metadata: JSONObject, document: JSONObject, extractedDocuments: JSONObject[], - documentsToGet: JSONObject[] + documentsToGet: JSONObject[], ) { let extractedDocument; @@ -3079,7 +3079,7 @@ export class ES7 { {}, metadata.upsert, document.changes, - document.default + document.default, ), }, }; @@ -3126,7 +3126,7 @@ export class ES7 { "storage", "invalid_mapping", currentPath, - didYouMean(property, mappingProperties) + didYouMean(property, mappingProperties), ); } @@ -3188,7 +3188,7 @@ export class ES7 { "storage", "multiple_indice_alias", `"alias" starting with "${ALIAS_PREFIX}"`, - '"indices"' + '"indices"', ); } @@ -3203,7 +3203,7 @@ export class ES7 { * @private */ async _getSettings( - esRequest: RequestParams.IndicesGetSettings + esRequest: RequestParams.IndicesGetSettings, ): Promise { const response = await this._client.indices.getSettings(esRequest); const index = esRequest.index as string; @@ -3222,10 +3222,10 @@ export class ES7 { */ async _getAvailableIndice( index: string, - collection: string + collection: string, ): Promise { let indice = this._getAlias(index, collection).substring( - INDEX_PREFIX_POSITION_IN_ALIAS + INDEX_PREFIX_POSITION_IN_ALIAS, ); if (!(await this._client.indices.exists({ index: indice })).body) { @@ -3266,7 +3266,7 @@ export class ES7 { async _getAliasFromIndice(indice: string) { const { body } = await this._client.indices.getAlias({ index: indice }); const aliases = Object.keys(body[indice].aliases).filter((alias) => - alias.startsWith(ALIAS_PREFIX) + alias.startsWith(ALIAS_PREFIX), ); if (aliases.length < 1) { @@ -3292,7 +3292,7 @@ export class ES7 { const indicesWithoutAlias = indices.filter( (indice) => indice[INDEX_PREFIX_POSITION_IN_INDICE] === this._indexPrefix && - !aliases.some((alias) => alias.indice === indice) + !aliases.some((alias) => alias.indice === indice), ); const esRequest = { body: { actions: [] } }; @@ -3326,7 +3326,7 @@ export class ES7 { "services", "storage", "invalid_collection_name", - collection + collection, ); } } @@ -3341,7 +3341,7 @@ export class ES7 { _extractIndex(alias) { return alias.substr( INDEX_PREFIX_POSITION_IN_ALIAS + 1, - alias.indexOf(NAME_SEPARATOR) - INDEX_PREFIX_POSITION_IN_ALIAS - 1 + alias.indexOf(NAME_SEPARATOR) - 
INDEX_PREFIX_POSITION_IN_ALIAS - 1, ); } @@ -3459,7 +3459,7 @@ export class ES7 { * @returns {Promise.} resolve to an array of documents */ async _getAllDocumentsFromQuery( - esRequest: RequestParams.Search> + esRequest: RequestParams.Search>, ) { let { body: { hits, _scroll_id }, @@ -3486,7 +3486,7 @@ export class ES7 { hits.hits.map((h: JSONObject) => ({ _id: h._id, _source: h._source, - })) + })), ); } @@ -3537,7 +3537,7 @@ export class ES7 { "services", "storage", "invalid_query_keyword", - `${key}.${scriptArg}` + `${key}.${scriptArg}`, ); } } @@ -3592,14 +3592,14 @@ export class ES7 { assert( typeof configValue === "string", - `services.storageEngine.${key} must be a string.` + `services.storageEngine.${key} must be a string.`, ); const parsedValue = ms(configValue); assert( typeof parsedValue === "number", - `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").` + `Invalid parsed value from ms() for services.storageEngine.${key} ("${typeof parsedValue}").`, ); return parsedValue; @@ -3652,7 +3652,7 @@ export class ES7 { esState = esStateEnum.OK; } else { global.kuzzle.log.info( - `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining` + `[ℹ] Still waiting for Elasticsearch: ${health.body.number_of_pending_tasks} cluster tasks remaining`, ); await Bluebird.delay(1000); } @@ -3677,7 +3677,7 @@ export class ES7 { "storage", "invalid_mapping", path, - "Dynamic property value should be a string." + "Dynamic property value should be a string.", ); } @@ -3688,8 +3688,8 @@ export class ES7 { "invalid_mapping", path, `Incorrect dynamic property value (${value}). Should be one of "${DYNAMIC_PROPERTY_VALUES.join( - '", "' - )}"` + '", "', + )}"`, ); } } @@ -3698,7 +3698,7 @@ export class ES7 { _setLastActionToKuzzleMeta( esRequest: JSONObject, alias: string, - kuzzleMeta: JSONObject + kuzzleMeta: JSONObject, ) { /** * @warning Critical code section @@ -3792,7 +3792,7 @@ function assertWellFormedRefresh(esRequest) { "storage", "invalid_argument", "refresh", - '"wait_for", false' + '"wait_for", false', ); } } diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts index 3bda2252bc..a8aea091a0 100644 --- a/lib/service/storage/8/elasticsearch.ts +++ b/lib/service/storage/8/elasticsearch.ts @@ -183,7 +183,10 @@ export class ES8 { const { version } = await this._client.info(); - if (version && !semver.satisfies(semver.coerce(version.number), ">=8.0.0")) { + if ( + version && + !semver.satisfies(semver.coerce(version.number), ">=8.0.0") + ) { throw kerror.get( "services", "storage", diff --git a/lib/service/storage/Elasticsearch.ts b/lib/service/storage/Elasticsearch.ts index fcfb70361c..0cba407c54 100644 --- a/lib/service/storage/Elasticsearch.ts +++ b/lib/service/storage/Elasticsearch.ts @@ -7,17 +7,6 @@ import { ES8 } from "./8/elasticsearch"; import Service from "../service"; import scopeEnum from "../../core/storage/storeScopeEnum"; -function printWarning() { - /* eslint-disable */ - console.warn( - "Elasticsearch 7 is deprecated and will be removed in the next major release." 
- ); - console.warn("Please consider upgrading your Elasticsearch version."); - console.warn("Update your configuration to set 'majorVersion' to 8."); - console.warn("Under the key service.storageEngine.majorVersion"); - /* eslint-disable */ -} - export class Elasticsearch extends Service { public client: any; @@ -25,9 +14,6 @@ export class Elasticsearch extends Service { super("elasticsearch", config); if (config.majorVersion === 7) { - if (scope === scopeEnum.PUBLIC) { - // printWarning(); - } this.client = new ES7(config, scope); } else if (config.majorVersion === 8) { this.client = new ES8(config, scope); @@ -43,7 +29,6 @@ export class Elasticsearch extends Service { switch (version) { case 7: - // printWarning(); return new ClientES7(config); case 8: return new ClientES8(config); From 1b4550569c133fbfca1988711bbd21501b95bb60 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 27 Feb 2024 10:18:09 +0100 Subject: [PATCH 21/59] refactor(elasticsearch 8): refactoring deprecated methods --- lib/service/storage/7/elasticsearch.ts | 4 ++-- lib/service/storage/8/elasticsearch.ts | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/service/storage/7/elasticsearch.ts b/lib/service/storage/7/elasticsearch.ts index f21089b917..f9daa429b9 100644 --- a/lib/service/storage/7/elasticsearch.ts +++ b/lib/service/storage/7/elasticsearch.ts @@ -3366,12 +3366,12 @@ export class ES7 { * * @returns {Object.} Indexes as key and an array of their collections as value */ - _extractSchema(aliases, { includeHidden = false } = {}) { + _extractSchema(aliases: string[], { includeHidden = false } = {}) { const schema = {}; for (const alias of aliases) { const [indexName, collectionName] = alias - .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) + .slice(INDEX_PREFIX_POSITION_IN_ALIAS + 1) .split(NAME_SEPARATOR); if ( diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts index a8aea091a0..7b65bd17cb 100644 --- a/lib/service/storage/8/elasticsearch.ts +++ b/lib/service/storage/8/elasticsearch.ts @@ -3369,10 +3369,11 @@ export class ES8 { */ _extractSchema(aliases: string[], { includeHidden = false } = {}) { const schema = {}; + console.log(aliases); for (const alias of aliases) { const [indexName, collectionName] = alias - .substr(INDEX_PREFIX_POSITION_IN_ALIAS + 1, alias.length) + .slice(INDEX_PREFIX_POSITION_IN_ALIAS + 1) .split(NAME_SEPARATOR); if ( From 929adef870a134bc7d7815c45300904fb99f95e3 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 27 Feb 2024 11:09:49 +0100 Subject: [PATCH 22/59] chore(elasticsearch 8): linting the app --- lib/service/storage/8/elasticsearch.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts index 7b65bd17cb..bd60064659 100644 --- a/lib/service/storage/8/elasticsearch.ts +++ b/lib/service/storage/8/elasticsearch.ts @@ -3369,7 +3369,6 @@ export class ES8 { */ _extractSchema(aliases: string[], { includeHidden = false } = {}) { const schema = {}; - console.log(aliases); for (const alias of aliases) { const [indexName, collectionName] = alias From ea9b9a6ec3f5045743a81e63fd05d5382e645c77 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 15:33:58 +0100 Subject: [PATCH 23/59] test(pipeline): test against Elasticsearch 7 and 8 --- .ci/scripts/run-monkey-tests.sh | 34 +++++---- .ci/scripts/run-test-cluster.sh | 24 ++++--- .ci/services-7.yml | 31 ++++++++ .ci/{services.yml => services-8.yml} | 1 + .ci/test-cluster-7.yml | 72 
+++++++++++++++++++ .ci/{test-cluster.yml => test-cluster-8.yml} | 1 + .../actions/build-and-run-kuzzle/action.yml | 11 ++- .github/actions/functional-tests/action.yml | 1 + .github/actions/monkey-tests/action.yml | 1 + .github/workflows/workflow.yaml | 43 +++++------ lib/config/default.config.ts | 2 +- lib/service/storage/Elasticsearch.ts | 14 ++-- ...StorageEngineElasticsearchConfiguration.ts | 4 +- 13 files changed, 182 insertions(+), 57 deletions(-) create mode 100644 .ci/services-7.yml rename .ci/{services.yml => services-8.yml} (94%) create mode 100644 .ci/test-cluster-7.yml rename .ci/{test-cluster.yml => test-cluster-8.yml} (97%) diff --git a/.ci/scripts/run-monkey-tests.sh b/.ci/scripts/run-monkey-tests.sh index f24949897f..ad52131a06 100755 --- a/.ci/scripts/run-monkey-tests.sh +++ b/.ci/scripts/run-monkey-tests.sh @@ -1,32 +1,30 @@ -#!/bin/bash - -set -ex +echo "Testing Kuzzle against node v$NODE_VERSION" -if [ -z "$NODE_VERSION" ]; -then - echo "Missing NODE_VERSION, use default NODE_20_VERSION" - export NODE_VERSION=$NODE_20_VERSION +if [ "$ES_VERSION" == "7" ]; then + YML_FILE='./.ci/test-cluster-7.yml' +elif [ "$ES_VERSION" == "8" ]; then + YML_FILE='./.ci/test-cluster-8.yml' +else + echo "Invalid ES_VERSION. It should be either '7' or '8'." + exit 1 fi -echo "Testing Kuzzle against node v$NODE_VERSION" - -docker compose -f ./.ci/test-cluster.yml down -v +docker compose -f $YML_FILE down -v echo "Installing dependencies..." -docker compose -f ./.ci/test-cluster.yml run --rm kuzzle_node_1 npm ci +docker compose -f $YML_FILE run --rm kuzzle_node_1 npm ci -if [ "$REBUILD" == "true" ]; -then - docker compose -f ./.ci/test-cluster.yml run --rm kuzzle_node_1 npm rebuild +if [ "$REBUILD" == "true" ]; then + docker compose -f $YML_FILE run --rm kuzzle_node_1 npm rebuild fi -docker compose -f ./.ci/test-cluster.yml run --rm kuzzle_node_1 npm run build +docker compose -f $YML_FILE run --rm kuzzle_node_1 npm run build echo "[$(date)] - Starting Kuzzle Cluster..." -trap 'docker compose -f ./.ci/test-cluster.yml logs' err +trap 'docker compose -f $YML_FILE logs' err -docker compose -f ./.ci/test-cluster.yml up -d +docker compose -f $YML_FILE up -d # don't wait on 7512: nginx will accept connections far before Kuzzle does KUZZLE_PORT=17510 ./bin/wait-kuzzle @@ -39,4 +37,4 @@ echo "Installing Kuzzle Monkey Tester..." cd kuzzle-monkey-tests npm ci -node index.js +node index.js \ No newline at end of file diff --git a/.ci/scripts/run-test-cluster.sh b/.ci/scripts/run-test-cluster.sh index 42e30d2b63..cdd7a6059c 100755 --- a/.ci/scripts/run-test-cluster.sh +++ b/.ci/scripts/run-test-cluster.sh @@ -2,7 +2,6 @@ set -ex - if [ -z "$NODE_VERSION" ]; then echo "Missing NODE_VERSION, use default NODE_20_VERSION" @@ -11,23 +10,32 @@ fi echo "Testing Kuzzle against node v$NODE_VERSION" -docker compose -f ./.ci/test-cluster.yml down -v +if [ "$ES_VERSION" == "7" ]; then + YML_FILE='./.ci/test-cluster-7.yml' +elif [ "$ES_VERSION" == "8" ]; then + YML_FILE='./.ci/test-cluster-8.yml' +else + echo "Invalid ES_VERSION. It should be either '7' or '8'." + exit 1 +fi + +docker compose -f $YML_FILE down -v echo "Installing dependencies..." 
-docker compose -f ./.ci/test-cluster.yml run --rm kuzzle_node_1 npm ci +docker compose -f $YML_FILE run --rm kuzzle_node_1 npm ci if [ "$REBUILD" == "true" ]; then - docker compose -f ./.ci/test-cluster.yml run --rm kuzzle_node_1 npm rebuild + docker compose -f $YML_FILE run --rm kuzzle_node_1 npm rebuild fi -docker compose -f ./.ci/test-cluster.yml run --rm kuzzle_node_1 npm run build +docker compose -f $YML_FILE run --rm kuzzle_node_1 npm run build echo "[$(date)] - Starting Kuzzle Cluster..." -trap 'docker compose -f ./.ci/test-cluster.yml logs' err +trap 'docker compose -f $YML_FILE logs' err -docker compose -f ./.ci/test-cluster.yml up -d +docker compose -f $YML_FILE up -d # don't wait on 7512: nginx will accept connections far before Kuzzle does KUZZLE_PORT=17510 ./bin/wait-kuzzle @@ -36,4 +44,4 @@ KUZZLE_PORT=17512 ./bin/wait-kuzzle trap - err -npm run $KUZZLE_FUNCTIONAL_TESTS +npm run $KUZZLE_FUNCTIONAL_TESTS \ No newline at end of file diff --git a/.ci/services-7.yml b/.ci/services-7.yml new file mode 100644 index 0000000000..f191b2e2e4 --- /dev/null +++ b/.ci/services-7.yml @@ -0,0 +1,31 @@ +version: '3' + +services: + kuzzle: + build: + dockerfile: ./docker/images/${KUZZLE_IMAGE}/Dockerfile + context: .. + command: ["kuzzle", "start"] + cap_add: + - SYS_PTRACE + ulimits: + nofile: 65536 + sysctls: + - net.core.somaxconn=8192 + environment: + - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 + - kuzzle_services__internalCache__node__host=redis + - kuzzle_services__memoryStorage__node__host=redis + depends_on: + - redis + - elasticsearch + ports: + - "7512:7512" + + redis: + image: redis:6 + + elasticsearch: + image: kuzzleio/elasticsearch:7 + ulimits: + nofile: 65536 diff --git a/.ci/services.yml b/.ci/services-8.yml similarity index 94% rename from .ci/services.yml rename to .ci/services-8.yml index 2ef4d93014..102ab3892c 100644 --- a/.ci/services.yml +++ b/.ci/services-8.yml @@ -16,6 +16,7 @@ services: - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__internalCache__node__host=redis - kuzzle_services__memoryStorage__node__host=redis + - kuzzle_services__storageEngine__majorVersion=8 depends_on: - redis - elasticsearch diff --git a/.ci/test-cluster-7.yml b/.ci/test-cluster-7.yml new file mode 100644 index 0000000000..9fe99e878a --- /dev/null +++ b/.ci/test-cluster-7.yml @@ -0,0 +1,72 @@ +version: '3.4' + +x-kuzzle-config: &kuzzle-config + image: kuzzleio/kuzzle-runner:${NODE_VERSION:-20} + command: > + bash -c " + node ./docker/scripts/start-kuzzle-test.js --enable-plugins kuzzle-plugin-cluster,functional-test-plugin + " + volumes: + - "..:/var/app" + cap_add: + - SYS_PTRACE + ulimits: + nofile: 65536 + sysctls: + - net.core.somaxconn=8192 + depends_on: + - redis + - elasticsearch + environment: + - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 + - kuzzle_services__storageEngine__commonMapping__dynamic=true + - kuzzle_services__internalCache__node__host=redis + - kuzzle_services__memoryStorage__node__host=redis + - kuzzle_server__protocols__mqtt__enabled=true + - kuzzle_server__protocols__mqtt__developmentMode=false + - kuzzle_http__accessControlAllowOrigin=localhost + - kuzzle_limits__loginsPerSecond=50 + - kuzzle_server__protocols__http__additionalContentTypes=*json:["application/x-yaml"] + - NODE_ENV=development + - NODE_VERSION=${NODE_VERSION:-20} + - DEBUG=none + - SECRETS_FILE_PREFIX=/var/app/ + # cluster + - kuzzle_plugins__cluster__privileged=true + - 
kuzzle_plugins__cluster__minimumNodes=3 + +services: + nginx: + image: nginx:1.19-alpine + depends_on: + - kuzzle_node_1 + - kuzzle_node_2 + - kuzzle_node_3 + ports: + - "7512:7512" + volumes: + - ../docker/nginx-dev:/etc/nginx/conf.d + + kuzzle_node_1: + <<: *kuzzle-config + ports: + - "17510:7512" + - "1883:1883" + + kuzzle_node_2: + <<: *kuzzle-config + ports: + - "17511:7512" + + kuzzle_node_3: + <<: *kuzzle-config + ports: + - "17512:7512" + + redis: + image: redis:6 + + elasticsearch: + image: kuzzleio/elasticsearch:7 + ulimits: + nofile: 65536 diff --git a/.ci/test-cluster.yml b/.ci/test-cluster-8.yml similarity index 97% rename from .ci/test-cluster.yml rename to .ci/test-cluster-8.yml index ae0dc6467a..e400334fcd 100644 --- a/.ci/test-cluster.yml +++ b/.ci/test-cluster-8.yml @@ -18,6 +18,7 @@ x-kuzzle-config: &kuzzle-config - redis - elasticsearch environment: + - kuzzle_services__storageEngine__majorVersion=8 - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__storageEngine__commonMapping__dynamic=true - kuzzle_services__internalCache__node__host=redis diff --git a/.github/actions/build-and-run-kuzzle/action.yml b/.github/actions/build-and-run-kuzzle/action.yml index 61ed488d62..4a7408f7f1 100644 --- a/.github/actions/build-and-run-kuzzle/action.yml +++ b/.github/actions/build-and-run-kuzzle/action.yml @@ -24,7 +24,16 @@ runs: shell: bash - run: | - trap 'docker compose -f ./.ci/services.yml logs' err + if [ "$ES_VERSION" == "7" ]; then + YML_FILE='./.ci/services-7.yml' + elif [ "$ES_VERSION" == "8" ]; then + YML_FILE='./.ci/services-8.yml' + else + echo "Invalid ES_VERSION. It should be either '7' or '8'." + exit 1 + fi + + trap 'docker compose -f $YML_FILE logs' err export MAX_TRIES=60 ./bin/wait-kuzzle diff --git a/.github/actions/functional-tests/action.yml b/.github/actions/functional-tests/action.yml index bbe78f7c0d..015293936f 100644 --- a/.github/actions/functional-tests/action.yml +++ b/.github/actions/functional-tests/action.yml @@ -18,4 +18,5 @@ runs: env: KUZZLE_FUNCTIONAL_TESTS: ${{ inputs.test-set }} NODE_VERSION: ${{ inputs.node-version }} + ES_VERSION: ${{ inputs.es-version }} shell: bash diff --git a/.github/actions/monkey-tests/action.yml b/.github/actions/monkey-tests/action.yml index 1d5a65e788..d43902adc6 100644 --- a/.github/actions/monkey-tests/action.yml +++ b/.github/actions/monkey-tests/action.yml @@ -11,4 +11,5 @@ runs: - run: ./.ci/scripts/run-monkey-tests.sh env: NODE_VERSION: ${{ inputs.node-version }} + ES_VERSION: ${{ inputs.es-version }} shell: bash diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index b388b933ce..e74e342220 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -1,4 +1,4 @@ -name: Run tests +name: Tests on: push: @@ -11,6 +11,8 @@ env: NODE_LTS_MAINTENANCE_VERSION: "16" NODE_LTS_ACTIVE_VERSION: "18" NODE_LTS_CURRENT_VERSION: "20" + ELASTICSEARCH_MAINTENANCE_VERSION: "7" + ELASTICSEARCH_ACTIVE_VERSION: "8" jobs: prepare-matrix: @@ -20,11 +22,12 @@ jobs: - id: set-matrix run: | echo "matrix={\"node-version\": [\"$NODE_LTS_MAINTENANCE_VERSION\", \"$NODE_LTS_ACTIVE_VERSION\", \"$NODE_LTS_CURRENT_VERSION\"]}" >> $GITHUB_OUTPUT + echo "es-matrix={\"es-version\": [\"$ELASTICSEARCH_MAINTENANCE_VERSION\", \"ELASTICSEARCH_ACTIVE_VERSION\"]} >> $GITHUB_OUTPUT + echo "test-set={\"test-set\":[\"jest\", \"http\", \"websocket\", \"legacy:mqtt\", \"legacy:http\", \"legacy:websocket\"]}" >> $GITHUB_OUTPUT outputs: matrix: ${{ 
steps.set-matrix.outputs.matrix }} - node_lts_maintenance_version: ${{ env.NODE_LTS_MAINTENANCE_VERSION }} - node_lts_active_version: ${{ env.NODE_LTS_ACTIVE_VERSION }} - node_lts_current_version: ${{ env.NODE_LTS_CURRENT_VERSION }} + es-matrix: ${{ steps.set-matrix.outputs.es-matrix }} + test-set: ${{ steps.set-matrix.outputs.test-set }} error-codes-check: name: Documentation - Error codes check @@ -67,7 +70,7 @@ jobs: run: npm ci shell: bash - - name: Run ESLint + - name: ESLint run: npm run test:lint shell: bash @@ -90,7 +93,7 @@ jobs: with: node-version: ${{ matrix.node-version }} - - name: Run unit test using Node ${{ matrix.node-version }} + - name: Unit test using Node ${{ matrix.node-version }} uses: ./.github/actions/unit-tests env: NODE_VERSION: ${{ matrix.node-version }} @@ -106,6 +109,7 @@ jobs: strategy: matrix: kuzzle-image: ["kuzzle"] + es-version: ${{ fromJson(needs.prepare-matrix.outputs.es-matrix).es-version }} steps: - name: Checkout project uses: actions/checkout@v3 @@ -114,22 +118,16 @@ jobs: uses: ./.github/actions/build-and-run-kuzzle with: KUZZLE_IMAGE: ${{ matrix.kuzzle-image }} + es-version: ${{ matrix.es-version }} functional-tests: name: Functional tests needs: [unit-tests, prepare-matrix] strategy: matrix: - test_set: - [ - jest, - http, - websocket, - "legacy:mqtt", - "legacy:http", - "legacy:websocket", - ] + test-set: ${{ fromJson(needs.prepare-matrix.outputs.test-set).test-set }} node-version: ${{ fromJson(needs.prepare-matrix.outputs.matrix).node-version }} + es-version: ${{ fromJson(needs.prepare-matrix.outputs.es-matrix).es-version }} runs-on: ubuntu-22.04 steps: - name: Checkout project @@ -143,11 +141,12 @@ jobs: with: node-version: ${{ matrix.node-version }} - - name: test suit ${{ matrix.test_set }} + - name: Test suit ${{ matrix.test-set }} uses: ./.github/actions/functional-tests with: - test-set: test:functional:${{ matrix.test_set }} + test-set: test:functional:${{ matrix.test-set }} node-version: ${{ matrix.node-version }} + es-version: ${{ matrix.es-version }} cluster-monkey-tests: name: Cluster Monkey Tests @@ -156,6 +155,7 @@ jobs: strategy: matrix: node-version: ${{ fromJson(needs.prepare-matrix.outputs.matrix).node-version }} + es-version: ${{ fromJson(needs.prepare-matrix.outputs.es-matrix).es-version }} steps: - name: Checkout project uses: actions/checkout@v3 @@ -174,10 +174,11 @@ jobs: with: node-version: ${{ matrix.node-version }} - - name: Run monkey testing tests suit + - name: Monkey testing uses: ./.github/actions/monkey-tests with: node-version: ${{ matrix.node-version }} + es-version: ${{matrix.es-version}} deploy-workflow: name: Deployment Workflow @@ -185,6 +186,6 @@ jobs: uses: ./.github/workflows/workflow-deployments.yml secrets: inherit with: - node_lts_maintenance_version: ${{ needs.prepare-matrix.outputs.node_lts_maintenance_version }} - node_lts_active_version: ${{ needs.prepare-matrix.outputs.node_lts_active_version }} - node_lts_current_version: ${{ needs.prepare-matrix.outputs.node_lts_current_version }} + node_lts_maintenance_version: ${{ env.NODE_LTS_MAINTENANCE_VERSION }} + node_lts_active_version: ${{ env.NODE_LTS_ACTIVE_VERSION }} + node_lts_current_version: ${{ env.NODE_LTS_CURRENT_VERSION }} diff --git a/lib/config/default.config.ts b/lib/config/default.config.ts index 557734e2d1..399b45b512 100644 --- a/lib/config/default.config.ts +++ b/lib/config/default.config.ts @@ -284,7 +284,7 @@ const defaultConfig: KuzzleConfiguration = { bootstrapLockTimeout: 60000, }, storageEngine: { - majorVersion: 7, + 
majorVersion: "7", aliases: ["storageEngine"], backend: "elasticsearch", client: { diff --git a/lib/service/storage/Elasticsearch.ts b/lib/service/storage/Elasticsearch.ts index 0cba407c54..a8d3071829 100644 --- a/lib/service/storage/Elasticsearch.ts +++ b/lib/service/storage/Elasticsearch.ts @@ -13,24 +13,26 @@ export class Elasticsearch extends Service { constructor(config: any, scope = scopeEnum.PUBLIC) { super("elasticsearch", config); - if (config.majorVersion === 7) { + console.log(config) + + if (config.majorVersion === "7") { this.client = new ES7(config, scope); - } else if (config.majorVersion === 8) { + } else if (config.majorVersion === "8") { this.client = new ES8(config, scope); } else { throw new Error("Invalid Elasticsearch version."); } } - static buildClient(config: any, version?: 7 | 8): any { + static buildClient(config: any, version?: "7" | "8"): any { if (!version) { - version = 7; + version = "7"; } switch (version) { - case 7: + case "7": return new ClientES7(config); - case 8: + case "8": return new ClientES8(config); default: throw new Error("Invalid Elasticsearch version."); diff --git a/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts b/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts index c810429371..eb4f9ad3f6 100644 --- a/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts +++ b/lib/types/config/storageEngine/StorageEngineElasticsearchConfiguration.ts @@ -1,9 +1,9 @@ export type StorageEngineElasticsearch = { /** * Elasticsearch major version - * @default 7 + * @default "7" */ - majorVersion: 7 | 8; + majorVersion: "7" | "8"; /** * @default ['storageEngine'] */ From 16fa78863648e28f9173d47db6a5dfe85564a396 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 15:43:44 +0100 Subject: [PATCH 24/59] chore(pipeline): typo inside the workflow.yaml --- .github/workflows/workflow.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index e74e342220..91f4f60da4 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -186,6 +186,6 @@ jobs: uses: ./.github/workflows/workflow-deployments.yml secrets: inherit with: - node_lts_maintenance_version: ${{ env.NODE_LTS_MAINTENANCE_VERSION }} + node_lts_maintenance_version: ${{ env.NODE_LTS_MAINTENANCE_VERSION }} node_lts_active_version: ${{ env.NODE_LTS_ACTIVE_VERSION }} node_lts_current_version: ${{ env.NODE_LTS_CURRENT_VERSION }} From a429884112673b6da3bcec2bfd2431614de5f7d2 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 15:53:05 +0100 Subject: [PATCH 25/59] chore(pipeline): fix an issue in workflow.yaml --- .github/workflows/workflow.yaml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index 91f4f60da4..3c103d8ae7 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -28,6 +28,9 @@ jobs: matrix: ${{ steps.set-matrix.outputs.matrix }} es-matrix: ${{ steps.set-matrix.outputs.es-matrix }} test-set: ${{ steps.set-matrix.outputs.test-set }} + node_lts_maintenance_version: ${{ env.NODE_LTS_MAINTENANCE_VERSION }} + node_lts_active_version: ${{ env.NODE_LTS_ACTIVE_VERSION }} + node_lts_current_version: ${{ env.NODE_LTS_CURRENT_VERSION }} error-codes-check: name: Documentation - Error codes check @@ -186,6 +189,6 @@ jobs: uses: ./.github/workflows/workflow-deployments.yml secrets: inherit with: - 
node_lts_maintenance_version: ${{ env.NODE_LTS_MAINTENANCE_VERSION }} - node_lts_active_version: ${{ env.NODE_LTS_ACTIVE_VERSION }} - node_lts_current_version: ${{ env.NODE_LTS_CURRENT_VERSION }} + node_lts_maintenance_version: ${{ needs.prepare-matrix.outputs.node_lts_maintenance_version }} + node_lts_active_version: ${{ needs.prepare-matrix.outputs.node_lts_active_version }} + node_lts_current_version: ${{ needs.prepare-matrix.outputs.node_lts_current_version }} From abd0cb0e53975df372ef62dfd9091574f1c79556 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 16:38:06 +0100 Subject: [PATCH 26/59] chore(pipeline): typo in workflow.yaml --- .github/workflows/workflow.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index 3c103d8ae7..5781f35769 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -22,7 +22,7 @@ jobs: - id: set-matrix run: | echo "matrix={\"node-version\": [\"$NODE_LTS_MAINTENANCE_VERSION\", \"$NODE_LTS_ACTIVE_VERSION\", \"$NODE_LTS_CURRENT_VERSION\"]}" >> $GITHUB_OUTPUT - echo "es-matrix={\"es-version\": [\"$ELASTICSEARCH_MAINTENANCE_VERSION\", \"ELASTICSEARCH_ACTIVE_VERSION\"]} >> $GITHUB_OUTPUT + echo "es-matrix={\"es-version\": [\"$ELASTICSEARCH_MAINTENANCE_VERSION\", \"ELASTICSEARCH_ACTIVE_VERSION\"]}" >> $GITHUB_OUTPUT echo "test-set={\"test-set\":[\"jest\", \"http\", \"websocket\", \"legacy:mqtt\", \"legacy:http\", \"legacy:websocket\"]}" >> $GITHUB_OUTPUT outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} From 4b4a511b675cf9aa730cb3f077479053bc26fc79 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 16:42:55 +0100 Subject: [PATCH 27/59] chore(elasticsearch 8): remove unwanted console.log --- lib/service/storage/Elasticsearch.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/lib/service/storage/Elasticsearch.ts b/lib/service/storage/Elasticsearch.ts index a8d3071829..10a20ebf62 100644 --- a/lib/service/storage/Elasticsearch.ts +++ b/lib/service/storage/Elasticsearch.ts @@ -13,8 +13,6 @@ export class Elasticsearch extends Service { constructor(config: any, scope = scopeEnum.PUBLIC) { super("elasticsearch", config); - console.log(config) - if (config.majorVersion === "7") { this.client = new ES7(config, scope); } else if (config.majorVersion === "8") { From 25c8910177262885382fc96f35998ca629d65153 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 16:52:35 +0100 Subject: [PATCH 28/59] test(pipeline): good Major_Version for ES --- .ci/services-7.yml | 1 + .ci/services-8.yml | 1 + .ci/test-cluster-7.yml | 1 + .ci/test-cluster-8.yml | 2 +- 4 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.ci/services-7.yml b/.ci/services-7.yml index f191b2e2e4..5fe1a32b76 100644 --- a/.ci/services-7.yml +++ b/.ci/services-7.yml @@ -13,6 +13,7 @@ services: sysctls: - net.core.somaxconn=8192 environment: + - kuzzle_services__storageEngine__majorVersion="7" - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__internalCache__node__host=redis - kuzzle_services__memoryStorage__node__host=redis diff --git a/.ci/services-8.yml b/.ci/services-8.yml index 102ab3892c..061096f488 100644 --- a/.ci/services-8.yml +++ b/.ci/services-8.yml @@ -13,6 +13,7 @@ services: sysctls: - net.core.somaxconn=8192 environment: + - kuzzle_services__storageEngine__majorVersion="8" - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__internalCache__node__host=redis - 
kuzzle_services__memoryStorage__node__host=redis diff --git a/.ci/test-cluster-7.yml b/.ci/test-cluster-7.yml index 9fe99e878a..c55aad06d4 100644 --- a/.ci/test-cluster-7.yml +++ b/.ci/test-cluster-7.yml @@ -18,6 +18,7 @@ x-kuzzle-config: &kuzzle-config - redis - elasticsearch environment: + - kuzzle_services__storageEngine__majorVersion="7" - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__storageEngine__commonMapping__dynamic=true - kuzzle_services__internalCache__node__host=redis diff --git a/.ci/test-cluster-8.yml b/.ci/test-cluster-8.yml index e400334fcd..f78f43be1c 100644 --- a/.ci/test-cluster-8.yml +++ b/.ci/test-cluster-8.yml @@ -18,7 +18,7 @@ x-kuzzle-config: &kuzzle-config - redis - elasticsearch environment: - - kuzzle_services__storageEngine__majorVersion=8 + - kuzzle_services__storageEngine__majorVersion="8" - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__storageEngine__commonMapping__dynamic=true - kuzzle_services__internalCache__node__host=redis From 2d51dad9520fbcf9c6e90cde0d1ff5598eca308f Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 17:26:04 +0100 Subject: [PATCH 29/59] test(tests): unit testing fixes --- test/core/backend/BackendStorage-es7.test.js | 2 +- test/core/backend/BackendStorage-es8.test.js | 2 +- test/core/storage/clientAdapter.test.js | 2 +- test/service/storage/elasticsearch-7.test.js | 2 +- test/service/storage/elasticsearch-8.test.js | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/test/core/backend/BackendStorage-es7.test.js b/test/core/backend/BackendStorage-es7.test.js index 6924b04f28..d3499808dc 100644 --- a/test/core/backend/BackendStorage-es7.test.js +++ b/test/core/backend/BackendStorage-es7.test.js @@ -24,7 +24,7 @@ describe("Backend", () => { describe("StorageManager#StorageClient", () => { it("should allows to construct an ES StorageClient", async () => { await application.start(); - global.kuzzle.config.services.storageEngine.majorVersion = 7; + global.kuzzle.config.services.storageEngine.majorVersion = "7"; global.kuzzle.config.services.storageEngine.client.node = "http://es-7:9200"; should(application.storage.StorageClient).be.a.Function(); diff --git a/test/core/backend/BackendStorage-es8.test.js b/test/core/backend/BackendStorage-es8.test.js index edb971aaaa..c8b5707832 100644 --- a/test/core/backend/BackendStorage-es8.test.js +++ b/test/core/backend/BackendStorage-es8.test.js @@ -24,7 +24,7 @@ describe("Backend", () => { describe("StorageManager#StorageClient", () => { it("should allows to construct an ES StorageClient", async () => { await application.start(); - global.kuzzle.config.services.storageEngine.majorVersion = 8; + global.kuzzle.config.services.storageEngine.majorVersion = "8"; global.kuzzle.config.services.storageEngine.client.node = "http://es-8:9200"; should(application.storage.StorageClient).be.a.Function(); diff --git a/test/core/storage/clientAdapter.test.js b/test/core/storage/clientAdapter.test.js index 5788693724..1885076c4e 100644 --- a/test/core/storage/clientAdapter.test.js +++ b/test/core/storage/clientAdapter.test.js @@ -38,7 +38,7 @@ describe("#core/storage/ClientAdapter", () => { beforeEach(async () => { kuzzle = new KuzzleMock(); - kuzzle.config.services.storageEngine.majorVersion = 7; + kuzzle.config.services.storageEngine.majorVersion = "7"; kuzzle.ask.restore(); publicAdapter = new ClientAdapter(scopeEnum.PUBLIC); diff --git a/test/service/storage/elasticsearch-7.test.js 
b/test/service/storage/elasticsearch-7.test.js index 61f116dea1..cbb562d992 100644 --- a/test/service/storage/elasticsearch-7.test.js +++ b/test/service/storage/elasticsearch-7.test.js @@ -40,7 +40,7 @@ describe("Test: ElasticSearch service", () => { beforeEach(async () => { kuzzle = new KuzzleMock(); - kuzzle.config.services.storageEngine.majorVersion = 7; + kuzzle.config.services.storageEngine.majorVersion = "7"; index = "nyc-open-data"; collection = "yellow-taxi"; diff --git a/test/service/storage/elasticsearch-8.test.js b/test/service/storage/elasticsearch-8.test.js index 052032c0bc..8564a44e1b 100644 --- a/test/service/storage/elasticsearch-8.test.js +++ b/test/service/storage/elasticsearch-8.test.js @@ -40,7 +40,7 @@ describe("Test: ElasticSearch service", () => { beforeEach(async () => { kuzzle = new KuzzleMock(); - kuzzle.config.services.storageEngine.majorVersion = 8; + kuzzle.config.services.storageEngine.majorVersion = "8"; index = "nyc-open-data"; collection = "yellow-taxi"; From f8351cd20c4c2b2550ab1b29d4ef67bc6f9b2ca6 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 17:44:59 +0100 Subject: [PATCH 30/59] test(pipeline): fix issues to pass es version --- .../actions/build-and-run-kuzzle/action.yml | 38 +++++++++++++++++-- .github/actions/functional-tests/action.yml | 3 ++ 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/.github/actions/build-and-run-kuzzle/action.yml b/.github/actions/build-and-run-kuzzle/action.yml index 4a7408f7f1..5f59ea013d 100644 --- a/.github/actions/build-and-run-kuzzle/action.yml +++ b/.github/actions/build-and-run-kuzzle/action.yml @@ -2,9 +2,12 @@ name: Build and Run Kuzzle image description: Build and Run Kuzzle image inputs: - KUZZLE_IMAGE: + kuzzle-image: description: Kuzzle image target required: true + es-version: + description: Elasticsearch version to use + required: true runs: using: "composite" @@ -19,11 +22,25 @@ runs: shell: bash - run: | - export KUZZLE_IMAGE=${{ inputs.KUZZLE_IMAGE }} - docker compose -f ./.ci/services.yml up -d + export KUZZLE_IMAGE=${{ inputs.kuzzle-image }} + export ES_VERSION=${{ inputs.es-version }} + + if [ "$ES_VERSION" == "7" ]; then + YML_FILE='./.ci/services-7.yml' + elif [ "$ES_VERSION" == "8" ]; then + YML_FILE='./.ci/services-8.yml' + else + echo "Invalid ES_VERSION. It should be either '7' or '8'." + exit 1 + fi + + docker compose -f $YML_FILE up -d shell: bash - run: | + export KUZZLE_IMAGE=${{ inputs.KUZZLE_IMAGE }} + export ES_VERSION=${{ inputs.es-version }} + if [ "$ES_VERSION" == "7" ]; then YML_FILE='./.ci/services-7.yml' elif [ "$ES_VERSION" == "8" ]; then @@ -41,5 +58,18 @@ runs: trap - err shell: bash - - run: docker compose -f ./.ci/services.yml down + - run: | + export KUZZLE_IMAGE=${{ inputs.KUZZLE_IMAGE }} + export ES_VERSION=${{ inputs.es-version }} + + if [ "$ES_VERSION" == "7" ]; then + YML_FILE='./.ci/services-7.yml' + elif [ "$ES_VERSION" == "8" ]; then + YML_FILE='./.ci/services-8.yml' + else + echo "Invalid ES_VERSION. It should be either '7' or '8'." 
+ exit 1 + fi + + docker compose -f $YML_FILE down -v shell: bash diff --git a/.github/actions/functional-tests/action.yml b/.github/actions/functional-tests/action.yml index 015293936f..7efde74d86 100644 --- a/.github/actions/functional-tests/action.yml +++ b/.github/actions/functional-tests/action.yml @@ -10,6 +10,9 @@ inputs: node-version: description: Node version to use in the Kuzzle Docker image required: true + es-version: + description: Elasticsearch version to use + required: true runs: using: "composite" From 356b677fa7d712ccec608d9af3bc6b869e02101a Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 17:45:31 +0100 Subject: [PATCH 31/59] chore(pipeline): fixing typo --- .github/workflows/workflow.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index 5781f35769..769ef04cbe 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -120,7 +120,7 @@ jobs: - name: Test to run Kuzzle image uses: ./.github/actions/build-and-run-kuzzle with: - KUZZLE_IMAGE: ${{ matrix.kuzzle-image }} + kuzzle-image: ${{ matrix.kuzzle-image }} es-version: ${{ matrix.es-version }} functional-tests: From 801209991e14c3d54ae0a9831122b401ef682b2e Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 18:27:55 +0100 Subject: [PATCH 32/59] chore(pipeline): typo in variable for workflow.yaml --- .github/workflows/workflow.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index 769ef04cbe..85905b1c77 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -22,7 +22,7 @@ jobs: - id: set-matrix run: | echo "matrix={\"node-version\": [\"$NODE_LTS_MAINTENANCE_VERSION\", \"$NODE_LTS_ACTIVE_VERSION\", \"$NODE_LTS_CURRENT_VERSION\"]}" >> $GITHUB_OUTPUT - echo "es-matrix={\"es-version\": [\"$ELASTICSEARCH_MAINTENANCE_VERSION\", \"ELASTICSEARCH_ACTIVE_VERSION\"]}" >> $GITHUB_OUTPUT + echo "es-matrix={\"es-version\": [\"$ELASTICSEARCH_MAINTENANCE_VERSION\", \"$ELASTICSEARCH_ACTIVE_VERSION\"]}" >> $GITHUB_OUTPUT echo "test-set={\"test-set\":[\"jest\", \"http\", \"websocket\", \"legacy:mqtt\", \"legacy:http\", \"legacy:websocket\"]}" >> $GITHUB_OUTPUT outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} From 0514aff8828779547fa4ed4c76c5ec9f45101405 Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 18:39:39 +0100 Subject: [PATCH 33/59] test(pipeline): fixing environment variable for docker-compose --- .ci/services-8.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.ci/services-8.yml b/.ci/services-8.yml index 061096f488..3a823aa7ed 100644 --- a/.ci/services-8.yml +++ b/.ci/services-8.yml @@ -17,7 +17,6 @@ services: - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__internalCache__node__host=redis - kuzzle_services__memoryStorage__node__host=redis - - kuzzle_services__storageEngine__majorVersion=8 depends_on: - redis - elasticsearch From 570c44ce3b5eba09eb083c54140105e147beaace Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 4 Mar 2024 19:44:47 +0100 Subject: [PATCH 34/59] test(doco): fixing docker compose files --- .ci/scripts/run-test-cluster.sh | 16 ++++++++-------- .ci/services-7.yml | 2 +- .ci/services-8.yml | 2 +- .ci/test-cluster-7.yml | 2 +- .ci/test-cluster-8.yml | 2 +- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.ci/scripts/run-test-cluster.sh b/.ci/scripts/run-test-cluster.sh index 
cdd7a6059c..3f845907cf 100755 --- a/.ci/scripts/run-test-cluster.sh +++ b/.ci/scripts/run-test-cluster.sh @@ -19,17 +19,17 @@ else exit 1 fi -docker compose -f $YML_FILE down -v +# docker compose -f $YML_FILE down -v -echo "Installing dependencies..." -docker compose -f $YML_FILE run --rm kuzzle_node_1 npm ci +# echo "Installing dependencies..." +# docker compose -f $YML_FILE run --rm kuzzle_node_1 npm ci -if [ "$REBUILD" == "true" ]; -then - docker compose -f $YML_FILE run --rm kuzzle_node_1 npm rebuild -fi +# if [ "$REBUILD" == "true" ]; +# then +# docker compose -f $YML_FILE run --rm kuzzle_node_1 npm rebuild +# fi -docker compose -f $YML_FILE run --rm kuzzle_node_1 npm run build +# docker compose -f $YML_FILE run --rm kuzzle_node_1 npm run build echo "[$(date)] - Starting Kuzzle Cluster..." diff --git a/.ci/services-7.yml b/.ci/services-7.yml index 5fe1a32b76..d688ab6501 100644 --- a/.ci/services-7.yml +++ b/.ci/services-7.yml @@ -13,7 +13,7 @@ services: sysctls: - net.core.somaxconn=8192 environment: - - kuzzle_services__storageEngine__majorVersion="7" + - kuzzle_services__storageEngine__majorVersion=7 - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__internalCache__node__host=redis - kuzzle_services__memoryStorage__node__host=redis diff --git a/.ci/services-8.yml b/.ci/services-8.yml index 3a823aa7ed..64134791d9 100644 --- a/.ci/services-8.yml +++ b/.ci/services-8.yml @@ -13,7 +13,7 @@ services: sysctls: - net.core.somaxconn=8192 environment: - - kuzzle_services__storageEngine__majorVersion="8" + - kuzzle_services__storageEngine__majorVersion=8 - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__internalCache__node__host=redis - kuzzle_services__memoryStorage__node__host=redis diff --git a/.ci/test-cluster-7.yml b/.ci/test-cluster-7.yml index c55aad06d4..14a399dbaa 100644 --- a/.ci/test-cluster-7.yml +++ b/.ci/test-cluster-7.yml @@ -18,7 +18,7 @@ x-kuzzle-config: &kuzzle-config - redis - elasticsearch environment: - - kuzzle_services__storageEngine__majorVersion="7" + - kuzzle_services__storageEngine__majorVersion=7 - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__storageEngine__commonMapping__dynamic=true - kuzzle_services__internalCache__node__host=redis diff --git a/.ci/test-cluster-8.yml b/.ci/test-cluster-8.yml index f78f43be1c..e400334fcd 100644 --- a/.ci/test-cluster-8.yml +++ b/.ci/test-cluster-8.yml @@ -18,7 +18,7 @@ x-kuzzle-config: &kuzzle-config - redis - elasticsearch environment: - - kuzzle_services__storageEngine__majorVersion="8" + - kuzzle_services__storageEngine__majorVersion=8 - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 - kuzzle_services__storageEngine__commonMapping__dynamic=true - kuzzle_services__internalCache__node__host=redis From ec98b8990a8c9c0c322dda54a40225752a10801f Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 5 Mar 2024 09:17:39 +0100 Subject: [PATCH 35/59] test(pipeline): uncomment code to properly build kuzzle --- .ci/scripts/run-test-cluster.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.ci/scripts/run-test-cluster.sh b/.ci/scripts/run-test-cluster.sh index 3f845907cf..cdd7a6059c 100755 --- a/.ci/scripts/run-test-cluster.sh +++ b/.ci/scripts/run-test-cluster.sh @@ -19,17 +19,17 @@ else exit 1 fi -# docker compose -f $YML_FILE down -v +docker compose -f $YML_FILE down -v -# echo "Installing dependencies..." 
-# docker compose -f $YML_FILE run --rm kuzzle_node_1 npm ci +echo "Installing dependencies..." +docker compose -f $YML_FILE run --rm kuzzle_node_1 npm ci -# if [ "$REBUILD" == "true" ]; -# then -# docker compose -f $YML_FILE run --rm kuzzle_node_1 npm rebuild -# fi +if [ "$REBUILD" == "true" ]; +then + docker compose -f $YML_FILE run --rm kuzzle_node_1 npm rebuild +fi -# docker compose -f $YML_FILE run --rm kuzzle_node_1 npm run build +docker compose -f $YML_FILE run --rm kuzzle_node_1 npm run build echo "[$(date)] - Starting Kuzzle Cluster..." From 4e525bad66542b028623c46ae30e83ca745cb7b8 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 5 Mar 2024 17:58:27 +0100 Subject: [PATCH 36/59] test(tests): functional tests --- docker/scripts/start-kuzzle-test.ts | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/docker/scripts/start-kuzzle-test.ts b/docker/scripts/start-kuzzle-test.ts index 3eb5fac8c6..5e9c8c64e2 100644 --- a/docker/scripts/start-kuzzle-test.ts +++ b/docker/scripts/start-kuzzle-test.ts @@ -423,9 +423,18 @@ app.controller.register("tests", { const response = await client.index(esRequest); const response2 = await app.storage.storageClient.index(esRequest); - should(omit(response, ["_version", "result", "_seq_no"])).match( - omit(response2, ["_version", "result", "_seq_no"]) - ); + if (response.body && response2.body) { + // ES7 + should(omit(response.body, ["_version", "result", "_seq_no"])).match( + omit(response2.body, ["_version", "result", "_seq_no"]) + ); + } else { + // ES8 + should(omit(response, ["_version", "result", "_seq_no"])).match( + omit(response2, ["_version", "result", "_seq_no"]) + ); + } + return response; }, From a2283ed9d3c51c70be8d87dcd6d716ba3e4eefc2 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 5 Mar 2024 18:10:38 +0100 Subject: [PATCH 37/59] ci(pipeline): update checkout@v3 to checkout@v4 --- .github/workflows/codeql.yml | 2 +- .github/workflows/core-dev.yml | 2 +- .github/workflows/elasticsearch.yml | 2 +- .github/workflows/kuzzle-core.yml | 2 +- .github/workflows/kuzzle-plugin-dev.yml | 2 +- .github/workflows/kuzzle-runner.yml | 2 +- .github/workflows/workflow-deployments.yml | 4 ++-- .github/workflows/workflow.yaml | 14 +++++++------- 8 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7a711fc5a1..50163193d7 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,7 +24,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Initialize CodeQL uses: github/codeql-action/init@v2 diff --git a/.github/workflows/core-dev.yml b/.github/workflows/core-dev.yml index e270299f97..b698376eb1 100644 --- a/.github/workflows/core-dev.yml +++ b/.github/workflows/core-dev.yml @@ -13,7 +13,7 @@ jobs: name: core-dev image runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/elasticsearch.yml b/.github/workflows/elasticsearch.yml index 40745ce449..1ea3914bf2 100644 --- a/.github/workflows/elasticsearch.yml +++ b/.github/workflows/elasticsearch.yml @@ -14,7 +14,7 @@ jobs: name: Elasticsearch image runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/kuzzle-core.yml b/.github/workflows/kuzzle-core.yml index 7e6c80fe4b..041520298f 100644 --- 
a/.github/workflows/kuzzle-core.yml +++ b/.github/workflows/kuzzle-core.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/kuzzle-plugin-dev.yml b/.github/workflows/kuzzle-plugin-dev.yml index c391caf4e1..96ea13cc1e 100644 --- a/.github/workflows/kuzzle-plugin-dev.yml +++ b/.github/workflows/kuzzle-plugin-dev.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/kuzzle-runner.yml b/.github/workflows/kuzzle-runner.yml index 9187283675..af94b87cdd 100644 --- a/.github/workflows/kuzzle-runner.yml +++ b/.github/workflows/kuzzle-runner.yml @@ -16,7 +16,7 @@ jobs: matrix: node-version: [16, 18, 20] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/workflow-deployments.yml b/.github/workflows/workflow-deployments.yml index f68e29e554..e212299f49 100644 --- a/.github/workflows/workflow-deployments.yml +++ b/.github/workflows/workflow-deployments.yml @@ -63,7 +63,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Extract references from context shell: bash @@ -92,7 +92,7 @@ jobs: pull-requests: write steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install additional libraries uses: ./.github/actions/install-packages diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index 85905b1c77..94255a978a 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -37,7 +37,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install additional libraries uses: ./.github/actions/install-packages @@ -59,7 +59,7 @@ jobs: needs: [prepare-matrix] steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install additional libraries uses: ./.github/actions/install-packages @@ -86,7 +86,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install additional libraries uses: ./.github/actions/install-packages @@ -115,7 +115,7 @@ jobs: es-version: ${{ fromJson(needs.prepare-matrix.outputs.es-matrix).es-version }} steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Test to run Kuzzle image uses: ./.github/actions/build-and-run-kuzzle @@ -134,7 +134,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install additional libraries uses: ./.github/actions/install-packages @@ -161,13 +161,13 @@ jobs: es-version: ${{ fromJson(needs.prepare-matrix.outputs.es-matrix).es-version }} steps: - name: Checkout project - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install additional libraries uses: ./.github/actions/install-packages - name: Cloning Monkey Tester - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: kuzzleio/kuzzle-monkey-tests path: "kuzzle-monkey-tests" From 05f5d4eaa2c2195e2492fd3b403a465527973e37 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 6 Mar 2024 
08:59:21 +0100
Subject: [PATCH 38/59] test(pipeline): typo in action.yaml build and run kuzzle

---
 .github/actions/build-and-run-kuzzle/action.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/actions/build-and-run-kuzzle/action.yml b/.github/actions/build-and-run-kuzzle/action.yml
index 5f59ea013d..1e3f0c588a 100644
--- a/.github/actions/build-and-run-kuzzle/action.yml
+++ b/.github/actions/build-and-run-kuzzle/action.yml
@@ -38,7 +38,7 @@ runs:
       shell: bash
 
     - run: |
-        export KUZZLE_IMAGE=${{ inputs.KUZZLE_IMAGE }}
+        export KUZZLE_IMAGE=${{ inputs.kuzzle-image }}
         export ES_VERSION=${{ inputs.es-version }}
 
         if [ "$ES_VERSION" == "7" ]; then
@@ -59,7 +59,7 @@ runs:
       shell: bash
 
     - run: |
-        export KUZZLE_IMAGE=${{ inputs.KUZZLE_IMAGE }}
+        export KUZZLE_IMAGE=${{ inputs.kuzzle-image }}
         export ES_VERSION=${{ inputs.es-version }}
 
         if [ "$ES_VERSION" == "7" ]; then

From 626c375e6a3d466bf478b5aa1c0b63c4aab9223b Mon Sep 17 00:00:00 2001
From: rolljee
Date: Tue, 12 Mar 2024 11:15:04 +0100
Subject: [PATCH 39/59] docs(es8): update documentation about ES8, with requested changes

---
 doc/2/guides/elasticsearch/v8/index.md | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/doc/2/guides/elasticsearch/v8/index.md b/doc/2/guides/elasticsearch/v8/index.md
index 1f505d950e..47fde9a2e9 100644
--- a/doc/2/guides/elasticsearch/v8/index.md
+++ b/doc/2/guides/elasticsearch/v8/index.md
@@ -22,13 +22,13 @@ Kuzzle exposes the [Elasticsearch Query Language](/core/2/guides/main-concepts/q
 
 The support of Elasticsearch 8 has been introduced in Kuzzle 2.30.0.
 
-The choice has been made to keep kuzzle compatible to avoid breaking changes around the support of ES8.
+The choice has been made to keep Kuzzle compatible to avoid breaking changes around the support of ES8.
 
-We wanted to allow the user to OPT-IN for the feature so no modification is needed on your behalf to stay with Elasticsearch 7.
+We wanted to allow the user to **opt-in** for this feature, so no modification is needed on your behalf if you want to keep using Elasticsearch 7.
 
-By default the majorVersion support will be 7 until Kuzzle v3.
+The default major version of Elasticsearch will be 7 until Kuzzle v3.
 
-The new key to change the version supported by is available under
+The new configuration key to change the version supported is available under:
 
 ```json
 {
@@ -41,11 +41,11 @@ The new key to change the version supported by is available under
 ```
 
 :::warning
-You can not set the majorVersion to 8 if you are using a version of Kuzzle that does not support it.
+You can not set the `majorVersion` key to 8 if you are using a version of Kuzzle that does not support it.
 :::
 
 :::info
-Kuzzle cannot connect to both ES7 and ES8 at the same time.
+Kuzzle cannot connect to both Elasticsearch 7 and Elasticsearch 8 at the same time.
 :::
 
 Once the version is set to 8, Kuzzle will use the Elasticsearch 8 API to communicate with the database.

From d48b6875eb761666bd8d5820501af4ad74efb262 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 12 Mar 2024 11:18:41 +0100 Subject: [PATCH 40/59] chore(releaserc): add a new branch elasticsearch-8 in releaserc to publish a new beta --- .releaserc.json | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.releaserc.json b/.releaserc.json index 46771fa1eb..132eade56d 100644 --- a/.releaserc.json +++ b/.releaserc.json @@ -1,7 +1,16 @@ { - "extends": ["semantic-release-config-kuzzle"], + "extends": [ + "semantic-release-config-kuzzle" + ], "branches": [ "master", - { "name": "beta", "prerelease": true } + { + "name": "beta", + "prerelease": true + }, + { + "name": "elasticsearch-8", + "prerelease": true + } ] } \ No newline at end of file From dac364adea563830a4857ac96db9731943ae7ded Mon Sep 17 00:00:00 2001 From: rolljee Date: Mon, 18 Mar 2024 15:34:37 +0100 Subject: [PATCH 41/59] test(test): fixing tests file --- .eslintrc.json | 4 ++-- docker/scripts/start-kuzzle-test.ts | 9 ++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.eslintrc.json b/.eslintrc.json index d59775fe54..a3229fee13 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -7,7 +7,7 @@ "plugin:kuzzle/node" ], "rules": { - "sort-keys": "off", - "kuzzle/array-foreach": "off" + "sort-keys": "warn", + "kuzzle/array-foreach": "warn" } } \ No newline at end of file diff --git a/docker/scripts/start-kuzzle-test.ts b/docker/scripts/start-kuzzle-test.ts index 5e9c8c64e2..dc8f8e92fc 100644 --- a/docker/scripts/start-kuzzle-test.ts +++ b/docker/scripts/start-kuzzle-test.ts @@ -356,8 +356,8 @@ app.errors.register("app", "api", "custom", { app.hook.register( "generic:document:afterUpdate", - (documents, request: KuzzleRequest): void => { - app.sdk.document.createOrReplace( + async (documents, request: KuzzleRequest) => { + await app.sdk.document.createOrReplace( request.getIndex(), request.getCollection(), "generic:document:afterUpdate", @@ -428,15 +428,14 @@ app.controller.register("tests", { should(omit(response.body, ["_version", "result", "_seq_no"])).match( omit(response2.body, ["_version", "result", "_seq_no"]) ); + return response.body; } else { // ES8 should(omit(response, ["_version", "result", "_seq_no"])).match( omit(response2, ["_version", "result", "_seq_no"]) ); + return response; } - - - return response; }, http: [{ verb: "post", path: "/tests/storage-client/:index" }], }, From 52f758263f818afc8a7404694eb67891e0fd09c7 Mon Sep 17 00:00:00 2001 From: rolljee Date: Thu, 21 Mar 2024 16:32:39 +0100 Subject: [PATCH 42/59] docs(docs): add compose override and modify contributing --- CONTRIBUTING.md | 8 + bin/.lib/colorOutput.js | 69 +++ bin/.upgrades/connectors/es.js | 102 ++++ bin/.upgrades/connectors/redis.js | 110 ++++ bin/.upgrades/lib/connectorContext.js | 38 ++ bin/.upgrades/lib/context.js | 143 ++++++ bin/.upgrades/lib/formatters.js | 103 ++++ bin/.upgrades/lib/inquirerExtended.js | 51 ++ bin/.upgrades/lib/logger.js | 98 ++++ bin/.upgrades/lib/progressBar.js | 68 +++ .../versions/v1/checkConfiguration.js | 81 +++ bin/.upgrades/versions/v1/index.js | 35 ++ bin/.upgrades/versions/v1/upgradeCache.js | 145 ++++++ bin/.upgrades/versions/v1/upgradeStorage.js | 468 ++++++++++++++++++ docker-compose.override.yml | 23 + docker-compose.yml | 11 +- 16 files changed, 1543 insertions(+), 10 deletions(-) create mode 100644 bin/.lib/colorOutput.js create mode 100644 bin/.upgrades/connectors/es.js create mode 100644 bin/.upgrades/connectors/redis.js create mode 100644 
bin/.upgrades/lib/connectorContext.js
 create mode 100644 bin/.upgrades/lib/context.js
 create mode 100644 bin/.upgrades/lib/formatters.js
 create mode 100644 bin/.upgrades/lib/inquirerExtended.js
 create mode 100644 bin/.upgrades/lib/logger.js
 create mode 100644 bin/.upgrades/lib/progressBar.js
 create mode 100644 bin/.upgrades/versions/v1/checkConfiguration.js
 create mode 100644 bin/.upgrades/versions/v1/index.js
 create mode 100644 bin/.upgrades/versions/v1/upgradeCache.js
 create mode 100644 bin/.upgrades/versions/v1/upgradeStorage.js
 create mode 100644 docker-compose.override.yml

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index bd2157615d..1e9848cf81 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -36,9 +36,17 @@ git clone git@github.com:kuzzleio/kuzzle.git
 cd kuzzle
 
 # Start a kuzzle cluster with development tools enabled
+# This will start a kuzzle with Elasticsearch 7
+docker compose -f docker-compose.yml up
+
+# Start a kuzzle cluster with development tools enabled
+# This will start a kuzzle with Elasticsearch 8
+# See [docker-compose.override.yml](docker-compose.override.yml) for more details
 docker compose up
 ```
 
+⚠️ **Important**: The two docker-compose commands launch different configurations.
+
 ## ENOSPC error
 
 On some Linux environments, you may get `ENOSPC` errors from the filesystem watcher, because of limits set too low.
diff --git a/bin/.lib/colorOutput.js b/bin/.lib/colorOutput.js
new file mode 100644
index 0000000000..446576730c
--- /dev/null
+++ b/bin/.lib/colorOutput.js
@@ -0,0 +1,69 @@
+/*
+ * Kuzzle, a backend software, self-hostable and ready to use
+ * to power modern apps
+ *
+ * Copyright 2015-2022 Kuzzle
+ * mailto: support AT kuzzle.io
+ * website: http://kuzzle.io
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+"use strict";
+
+const clc = require("cli-color");
+
+function noop(str) {
+  return str;
+}
+
+class ColorOutput {
+  constructor(opts) {
+    // /!\ "opts" might be a string
+    const noColors =
+      typeof opts === "object" && opts.parent && opts.parent.noColors;
+
+    this.format = {
+      error: noColors ? noop : clc.red,
+      warn: noColors ? noop : clc.yellow,
+      notice: noColors ? noop : clc.cyanBright,
+      ok: noColors ? noop : clc.green.bold,
+      question: noColors ?
noop : clc.whiteBright, + }; + } + + /* eslint-disable no-console */ + error(str) { + console.error(this.format.error(str)); + } + + warn(str) { + console.warn(this.format.warn(str)); + } + + notice(str) { + console.log(this.format.notice(str)); + } + + question(str) { + console.log(this.format.question(str)); + } + + ok(str) { + console.log(this.format.ok(str)); + } + + /* eslint-enable no-console */ +} + +module.exports = ColorOutput; diff --git a/bin/.upgrades/connectors/es.js b/bin/.upgrades/connectors/es.js new file mode 100644 index 0000000000..425f324764 --- /dev/null +++ b/bin/.upgrades/connectors/es.js @@ -0,0 +1,102 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const { formatWithOptions } = require("util"); + +const { Client } = require("@elastic/elasticsearch"); +const validator = require("validator"); +const _ = require("lodash"); + +const ConnectorContext = require("../lib/connectorContext"); + +let promise = null; + +async function getEsClient(context) { + const currentConfiguration = _.get( + context.config, + "services.storageEngine.client", + ); + + if (!currentConfiguration) { + context.log.error("Missing Kuzzle configuration for Elasticsearch."); + context.log.error( + "Missing configuration value: services.storageEngine.client", + ); + context.log.error("Aborted."); + process.exit(1); + } + + context.log.notice("Current Elasticsearch configuration:"); + context.log.print( + formatWithOptions({ colors: false, depth: null }, currentConfiguration), + ); + + const answers = await context.inquire.prompt([ + { + choices: ["source", "target", "source and target"], + default: "target", + message: "For this migration, use this current instance as the data", + name: "current", + type: "list", + }, + { + default: "", + message: ({ current }) => + `Enter the URL for the ${ + current === "source" ? "target" : "source" + } instance:`, + name: "url", + type: "input", + validate: (url) => { + const opts = { + protocols: ["http", "https"], + require_port: true, + require_protocol: true, + require_tld: false, + require_valid_protocol: true, + }; + + return ( + validator.isURL(url, opts) || + "A valid URL must be provided. Example: http://:" + ); + }, + when: ({ current }) => current !== "source and target", + }, + ]); + + const current = new Client(currentConfiguration); + const next = answers.url ? new Client({ node: answers.url }) : current; + + return answers.current === "source" + ? 
new ConnectorContext(context, current, next) + : new ConnectorContext(context, next, current); +} + +module.exports = async (context) => { + if (promise === null) { + promise = getEsClient(context); + } + + return promise; +}; diff --git a/bin/.upgrades/connectors/redis.js b/bin/.upgrades/connectors/redis.js new file mode 100644 index 0000000000..3829b20259 --- /dev/null +++ b/bin/.upgrades/connectors/redis.js @@ -0,0 +1,110 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const { formatWithOptions } = require("util"), + IORedis = require("ioredis"), + ConnectorContext = require("../lib/connectorContext"), + _ = require("lodash"); + +let promise = null; + +async function getRedisClient(context) { + const currentConfiguration = _.get(context.config, "services.internalCache"); + + if (!currentConfiguration) { + context.log.error("Missing Kuzzle configuration for Redis."); + context.log.error("Missing configuration value: services.internalCache"); + context.log.error("Aborted."); + process.exit(1); + } + + context.log.notice("Current Redis configuration:"); + context.log.print( + formatWithOptions({ colors: false, depth: null }, currentConfiguration), + ); + + const current = await context.inquire.direct({ + choices: ["source", "target", "source and target"], + default: "target", + message: "For this migration, use this current instance as the data", + type: "list", + }); + + const remaining = current === "source" ? "target" : "source"; + let answers = null; + + if (current !== "source and target") { + answers = await context.inquire.prompt([ + { + default: "", + message: `${remaining} server name or IP:`, + name: "server", + type: "input", + validate: (name) => name.length > 0 || "Non-empty string expected", + }, + { + default: "", + message: `${remaining} server port:`, + name: "port", + type: "input", + validate: (name) => { + const i = Number.parseFloat(name); + + if (!Number.isNaN(i) && Number.isInteger(i) && i > 1 && i <= 65535) { + return true; + } + + return "Invalid port number"; + }, + }, + ]); + } + + const options = { enableReadyCheck: true, lazyConnect: true }, + client = currentConfiguration.nodes + ? new IORedis.Cluster(currentConfiguration, options) + : new IORedis(currentConfiguration.node, options); + + await client.connect(); + + let next; + + if (answers) { + next = new IORedis(answers.port, answers.server, options); + await next.connect(); + } else { + next = client; + } + + return current === "source" + ? 
new ConnectorContext(context, client, next) + : new ConnectorContext(context, next, client); +} + +module.exports = async (context) => { + if (promise === null) { + promise = getRedisClient(context); + } + + return promise; +}; diff --git a/bin/.upgrades/lib/connectorContext.js b/bin/.upgrades/lib/connectorContext.js new file mode 100644 index 0000000000..515cd4ad4a --- /dev/null +++ b/bin/.upgrades/lib/connectorContext.js @@ -0,0 +1,38 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const UpgradeContext = require("./context"); + +class ConnectorContext extends UpgradeContext { + constructor(context, source, target) { + super(context); + this.source = source; + this.target = target; + } + + get inPlace() { + return this.source === this.target; + } +} + +module.exports = ConnectorContext; diff --git a/bin/.upgrades/lib/context.js b/bin/.upgrades/lib/context.js new file mode 100644 index 0000000000..1f9e4fdc6b --- /dev/null +++ b/bin/.upgrades/lib/context.js @@ -0,0 +1,143 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +"use strict"; + +const fs = require("fs"); +const path = require("path"); + +const rc = require("rc"); + +const inquirer = require("./inquirerExtended"); +const Logger = require("./logger"); + +const defaultConfiguration = require("../../../lib/config/default.config"); +const { version: currentVersion } = require("../../../package.json"); + +class Version { + constructor() { + this.from = null; + this.list = []; + } +} + +class UpgradeContext { + constructor(args) { + // copy constructor + if (args instanceof UpgradeContext) { + this.config = args.config; + this.log = args.log; + this.inquire = args.inquire; + this.version = args.version; + this.argv = args.argv; + } else { + this.config = null; + this.log = new Logger(args); + this.inquire = inquirer; + this.version = null; + this.argv = args; + } + } + + async init() { + await this.loadConfiguration(); + + if (this.config.configs) { + this.log.ok("Configuration files loaded:"); + this.config.configs.forEach((f) => this.log.print(`\t- ${f}`)); + } + + this.version = await this.getVersions(); + } + + async loadConfiguration() { + let cfg; + + try { + cfg = rc("kuzzle", JSON.parse(JSON.stringify(defaultConfiguration))); + this.config = cfg; + return; + } catch (e) { + this.log.error(`Cannot load configuration files: ${e.message}`); + if (this.config === null) { + this.log.error( + "Check your configuration files, and restart the upgrade script.", + ); + process.exit(1); + } + } + + // If we are here, this means that an error was thrown, due to a change made + // to configuration files *during* the upgrade (probably because a version + // upgrade asked the user to modify their configuration files manually) + // To prevent aborting unnecessarily during the upgrade process, we ask the + // user to fix the situation + const retry = await this.inquire.direct({ + default: true, + message: "Retry?", + type: "confirm", + }); + + if (!retry) { + this.log.error("Aborted by user action."); + process.exit(1); + } + + await this.loadConfiguration(); + } + + /** + * Asks the user the source version to upgrade from + * @returns {Version} + */ + async getVersions() { + const version = new Version(); + + this.log.print(`Current Kuzzle version: ${currentVersion}`); + + version.list = fs + .readdirSync(path.resolve(`${__dirname}/../versions`), { + withFileTypes: true, + }) + .filter((entry) => entry.isDirectory() && entry.name.match(/^v\d+$/)) + .map((entry) => entry.name) + .sort( + (a, b) => parseInt(a[0].substring(1)) - parseInt(b[0].substring(1)), + ); + + if (version.list.length === 1) { + version.from = version.list[0]; + } else { + version.from = await inquirer.direct({ + choices: version.list, + default: version.list[version.list.length - 1], + message: "Migrate from which version", + type: "list", + }); + + version.list = version.list.slice(version.list.indexOf(version.from)); + } + + return version; + } +} + +module.exports = UpgradeContext; diff --git a/bin/.upgrades/lib/formatters.js b/bin/.upgrades/lib/formatters.js new file mode 100644 index 0000000000..ae360ef01f --- /dev/null +++ b/bin/.upgrades/lib/formatters.js @@ -0,0 +1,103 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const clc = require("cli-color"); + +class RawFormatter { + raw(msg) { + return msg; + } +} + +class ColoredFormatter extends RawFormatter { + error(msg) { + return clc.red(msg); + } + + warn(msg) { + return clc.yellow(msg); + } + + notice(msg) { + return clc.cyan(msg); + } + + ok(msg) { + return clc.green(msg); + } + + question(msg) { + return clc.whiteBright(msg); + } +} + +class PrefixedFormatter extends RawFormatter { + error(msg) { + return `[ERROR] ${msg}`; + } + + warn(msg) { + return `[WARN] ${msg}`; + } + + notice(msg) { + return `[i] ${msg}`; + } + + ok(msg) { + return `[OK] ${msg}`; + } + + question(msg) { + return `[?] ${msg}`; + } +} + +class FileFormatter extends PrefixedFormatter { + error(msg) { + return Buffer.from(`[${new Date().toISOString()}]${super.error(msg)}\n`); + } + + warn(msg) { + return Buffer.from(`[${new Date().toISOString()}]${super.warn(msg)}\n`); + } + + notice(msg) { + return Buffer.from(`[${new Date().toISOString()}]${super.notice(msg)}\n`); + } + + ok(msg) { + return Buffer.from(`[${new Date().toISOString()}]${super.ok(msg)}\n`); + } + + question(msg) { + return Buffer.from(`[${new Date().toISOString()}]${super.question(msg)}\n`); + } + + // @override + raw(msg) { + return Buffer.from(`${msg}\n`); + } +} + +module.exports = { ColoredFormatter, FileFormatter, PrefixedFormatter }; diff --git a/bin/.upgrades/lib/inquirerExtended.js b/bin/.upgrades/lib/inquirerExtended.js new file mode 100644 index 0000000000..65ff08c368 --- /dev/null +++ b/bin/.upgrades/lib/inquirerExtended.js @@ -0,0 +1,51 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +// Returns an instance of "inquirer" extended with a "direct" function that +// returns the answer directly, instead of a key-value map of answers. +// Useful only because we often prompt questions one by one, with tasks in the +// middle, and this "direct" function helps reducing the clutter. 
+ +const assert = require("assert").strict, + inquirer = require("inquirer"), + _ = require("lodash"); + +inquirer.direct = async function direct(prompt) { + assert( + _.isPlainObject(prompt), + "Invalid argument: expected a non-empty object", + ); + assert( + typeof prompt.name === "undefined", + 'Unexpected "name" argument: if you need to set a name, use inquirer.prompt', + ); + + const p = _.cloneDeep(prompt); + p.name = "foo"; + + const { foo } = await inquirer.prompt(p); + + return foo; +}; + +module.exports = inquirer; diff --git a/bin/.upgrades/lib/logger.js b/bin/.upgrades/lib/logger.js new file mode 100644 index 0000000000..08892989cb --- /dev/null +++ b/bin/.upgrades/lib/logger.js @@ -0,0 +1,98 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const fs = require("fs"), + { + ColoredFormatter, + PrefixedFormatter, + FileFormatter, + } = require("./formatters"); + +class ColorOutput { + constructor(opts) { + this.terminalFormatter = opts.C + ? new PrefixedFormatter() + : new ColoredFormatter(); + + this.fileFormatter = new FileFormatter(); + + this.fileReport = null; + + if (!opts.R) { + this.notice(`Upgrade report file: ${opts.output}`); + this.fileReport = fs.openSync(opts.output, "w", 0o600); + } + } + + /* eslint-disable no-console */ + error(str) { + console.error(this.terminalFormatter.error(str)); + + if (this.fileReport) { + fs.writeSync(this.fileReport, this.fileFormatter.error(str)); + } + } + + warn(str) { + console.warn(this.terminalFormatter.warn(str)); + + if (this.fileReport) { + fs.writeSync(this.fileReport, this.fileFormatter.warn(str)); + } + } + + notice(str) { + console.log(this.terminalFormatter.notice(str)); + + if (this.fileReport) { + fs.writeSync(this.fileReport, this.fileFormatter.notice(str)); + } + } + + question(str) { + console.log(this.terminalFormatter.question(str)); + + if (this.fileReport) { + fs.writeSync(this.fileReport, this.fileFormatter.question(str)); + } + } + + ok(str) { + console.log(this.terminalFormatter.ok(str)); + + if (this.fileReport) { + fs.writeSync(this.fileReport, this.fileFormatter.ok(str)); + } + } + + print(str) { + console.log(this.terminalFormatter.raw(str)); + + if (this.fileReport) { + fs.writeSync(this.fileReport, this.fileFormatter.raw(str)); + } + } + /* eslint-enable no-console */ +} + +module.exports = ColorOutput; diff --git a/bin/.upgrades/lib/progressBar.js b/bin/.upgrades/lib/progressBar.js new file mode 100644 index 0000000000..b53b0ed1ef --- /dev/null +++ b/bin/.upgrades/lib/progressBar.js @@ -0,0 +1,68 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this 
file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const moment = require("moment"); + +// Simple progress bar making the wait for long tasks more bearable +class ProgressBar { + constructor(context, text, total, barSize = 20) { + this.text = text; + this.total = total; + this.barSize = barSize; + this.bar = new context.inquire.ui.BottomBar(); + this.update(0); + this.start = Date.now(); + } + + destroy() { + this.bar.updateBottomBar(""); + this.bar.close(); + } + + update(count) { + const remaining = this._getRemainingTime(count), + str = `${this.text} +${this._getBar(count)}(remaining: ${remaining}) ${count} / ${this.total}`; + + this.bar.updateBottomBar(str); + } + + _getRemainingTime(count) { + const elapsed = Date.now() - this.start, + remaining = + count > 0 ? Math.round((this.total * elapsed) / count) - elapsed : 0; + + return moment(remaining).format("mm:ss"); + } + + _getBar(count) { + const percent = (count * 100) / this.total, + progress = Math.round((percent * this.barSize) / 100); + + return ( + "[" + "#".repeat(progress) + "-".repeat(this.barSize - progress) + "]" + ); + } +} + +module.exports = ProgressBar; diff --git a/bin/.upgrades/versions/v1/checkConfiguration.js b/bin/.upgrades/versions/v1/checkConfiguration.js new file mode 100644 index 0000000000..6b2c93c393 --- /dev/null +++ b/bin/.upgrades/versions/v1/checkConfiguration.js @@ -0,0 +1,81 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const _ = require("lodash"); + +module.exports = async function check(context) { + let action = false; + const warn = (msg) => context.log.warn(`[CONFIG. 
FILES] ${msg}`); + const renamed = { + "services.db": "services.storageEngine", + "services.internalEngine": "services.internalIndex", + "services.storageEngine.commonMapping._kuzzle_info": + "services.storageEngine.commonMapping.properties._kuzzle_info", + "services.storageEngine.dynamic": + "services.storageEngine.commonMapping.dynamic", + }; + const deprecated = [ + "server.entryPoints", + "server.protocols.socketio", + "server.proxy", + "services.garbageCollector", + "services.storageEngine.client.apiVersion", + "services.storageEngine.commonMapping.properties._kuzzle_info.deletedAt", + "services.storageEngine.commonMapping.properties._kuzzle_info.active", + ]; + + for (const [oldName, newName] of Object.entries(renamed)) { + if (_.get(context.config, oldName)) { + action = true; + warn(`The configuration key "${oldName}" is now named "${newName}"`); + } + } + + for (const name of deprecated) { + if (_.get(context.config, name)) { + action = true; + warn(`The configuration key "${name}" is obsolete and should be removed`); + } + } + + if (action) { + const choices = ["Check again", "Abort", "Ignore (not recommended)"], + proceed = await context.inquire.direct({ + choices, + default: choices[0], + message: "Configuration files need to be updated:", + type: "list", + }); + + if (proceed === choices[0]) { + await context.loadConfiguration(); + return check(context); + } + + if (proceed === choices[1]) { + process.exit(1); + } + } + + context.log.ok("Configuration files checked: OK"); +}; diff --git a/bin/.upgrades/versions/v1/index.js b/bin/.upgrades/versions/v1/index.js new file mode 100644 index 0000000000..47350723b3 --- /dev/null +++ b/bin/.upgrades/versions/v1/index.js @@ -0,0 +1,35 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const checkConfiguration = require("./checkConfiguration"); +const upgradeStorage = require("./upgradeStorage"); +const upgradeCache = require("./upgradeCache"); + +module.exports = async function upgrade(context) { + context.log.notice("\n\n=== CONFIGURATION FILES ==="); + await checkConfiguration(context); + context.log.notice("\n\n=== STORAGE ==="); + await upgradeStorage(context); + context.log.notice("\n\n=== CACHE ==="); + await upgradeCache(context); +}; diff --git a/bin/.upgrades/versions/v1/upgradeCache.js b/bin/.upgrades/versions/v1/upgradeCache.js new file mode 100644 index 0000000000..30f45d6ed8 --- /dev/null +++ b/bin/.upgrades/versions/v1/upgradeCache.js @@ -0,0 +1,145 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const getRedisConnector = require("../../connectors/redis"); + +async function copyKey(context, key) { + const dump = await context.source.dumpBuffer(key), + ttl = Math.max(0, await context.source.pttl(key)); + + // Breaking change from v1 to v2, due to how indexes are handled: + // token keys are prefixed "repos/%kuzzle/token" in v1, and + // "repos/kuzzle/token" in v2 + const newKey = key.replace("repos/%kuzzle/token", "repos/kuzzle/token"); + + await context.target.restore(newKey, ttl, dump, "REPLACE"); +} + +async function getSourceKeys(context, pattern) { + if (!context.source.nodes) { + return context.source.keys(pattern); + } + + const keys = []; + + for (const node of context.source.nodes("master")) { + keys.push(...(await node.source.keys(pattern))); + } + + return keys; +} + +async function copyDatabase(context, db) { + await context.source.select(db); + await context.target.select(db); + + await context.target.flushdb(); + + const keys = await getSourceKeys(context, "*"); + + for (const key of keys) { + await copyKey(context, key); + } + + context.log.ok(`Imported cache keys from database ${db}`); +} + +async function inPlaceMigration(context) { + context.log.notice(` +In-place migration detected: this script will make the changes necessary to +make the cache data compatible with Kuzzle v2.`); + + const choices = { + abort: "Abort", + copy: "Copy to new keys (obsolete keys will be delete once expired)", + move: "Move keys (cannot be undone, cache won't work with Kuzzle v1 anymore)", + }, + action = await context.inquire.direct({ + choices: Object.values(choices), + default: choices.copy, + message: "Select how the database should be migrated:", + type: "list", + }); + + if (action === choices.abort) { + context.log.error("Aborted by user."); + process.exit(0); + } + + const db = context.config.services.internalCache.database || 0; + + await context.source.select(db); + + const keys = await getSourceKeys(context, "repos/*"); + + for (const key of keys) { + await copyKey(context, key); + + if (action === choices.move) { + await context.source.del(key); + } + } +} + +async function upgradeToTarget(context) { + context.log.notice(` +This script will WIPE TARGET DATABASES from the target cache instance. 
+Then, it will COPY all data from the source cache instance, without altering it +in any way.`); + + const confirm = await context.inquire.direct({ + default: true, + message: "Continue?", + type: "confirm", + }); + + if (!confirm) { + context.log.error("Aborted by user."); + process.exit(0); + } + + for (const cachedb of ["internalCache", "memoryStorage"]) { + const config = context.config.services[cachedb]; + + await copyDatabase(context, config.database || 0); + } +} + +module.exports = async function upgradeCache(context) { + const cacheContext = await getRedisConnector(context); + + try { + if (cacheContext.inPlace) { + await inPlaceMigration(cacheContext); + } else { + await upgradeToTarget(cacheContext); + } + + cacheContext.log.ok("Cache import complete."); + } catch (e) { + cacheContext.log.error(`Cache import failure: ${e.message}`); + cacheContext.log.print(e.stack); + cacheContext.log.error("Aborted."); + process.exit(1); + } +}; diff --git a/bin/.upgrades/versions/v1/upgradeStorage.js b/bin/.upgrades/versions/v1/upgradeStorage.js new file mode 100644 index 0000000000..391e69d8c8 --- /dev/null +++ b/bin/.upgrades/versions/v1/upgradeStorage.js @@ -0,0 +1,468 @@ +/* + * Kuzzle, a backend software, self-hostable and ready to use + * to power modern apps + * + * Copyright 2015-2022 Kuzzle + * mailto: support AT kuzzle.io + * website: http://kuzzle.io + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +"use strict"; + +const _ = require("lodash"); + +const getESConnector = require("../../connectors/es"); +const ProgressBar = require("../../lib/progressBar"); + +const INTERNAL_PREFIX = "%"; +const PUBLIC_PREFIX = "&"; +const NAME_SEPARATOR = "."; + +function transformProfile(profile) { + if (!Array.isArray(profile.policies)) { + return profile; + } + + for (const policy of profile.policies.filter((p) => p.restrictedTo)) { + for (const restriction of policy.restrictedTo.filter( + (r) => r.collections, + )) { + restriction.collections = restriction.collections.map((c) => + c.toLowerCase(), + ); + } + } + return profile; +} + +function getNewIndexName(index, collection) { + const prefix = index[0] === "%" ? 
"" : PUBLIC_PREFIX; + + return `${prefix}${index}${NAME_SEPARATOR}${collection}`; +} + +function fixIndexName(context, index, collection, newIndex) { + const lowercased = newIndex.toLowerCase(); + + if (lowercased !== newIndex) { + // uppercase letters were already forbidden in index names + context.log.warn( + `Index "${index}": collection "${collection}" has been renamed to "${collection.toLowerCase()}"`, + ); + } + + return lowercased; +} + +async function moveData(context, index, collection, newIndex, transform) { + let page = await context.source.search({ + body: { sort: ["_doc"] }, + index, + scroll: "1m", + size: context.argv.storagePageSize, + type: collection, + }); + + const total = page.body.hits.total; + const progressBar = new ProgressBar( + context, + `Importing: ${index}/${collection}`, + total, + ); + let moved = 0; + + while (moved < total) { + const bulk = []; + + for (let i = 0; i < page.body.hits.hits.length; i++) { + const doc = page.body.hits.hits[i]; + + if (doc._source._kuzzle_info) { + delete doc._source._kuzzle_info.active; + delete doc._source._kuzzle_info.deletedAt; + } + + if (transform) { + doc._source = transform(doc._source); + } + + bulk.push({ + create: { + _id: doc._id, + _index: newIndex, + _type: context._type, + }, + }); + bulk.push(doc._source); + } + + await context.target.bulk({ _source: false, body: bulk }); + + moved += page.body.hits.hits.length; + + progressBar.update(moved); + + if (moved < total) { + page = await context.source.scroll({ + scroll: "1m", + scroll_id: page.body._scroll_id, + }); + } + } + + progressBar.destroy(); + return total; +} + +async function upgradeMappings(context, index, collection, newIndex) { + const mappingsResponse = await context.source.indices.getMapping({ + index, + type: collection, + }); + const mappings = mappingsResponse.body[index].mappings[collection]; + + // replace obsolete mapping properties + if (mappings.properties && mappings.properties._kuzzle_info) { + mappings.properties._kuzzle_info = + context.config.services.storageEngine.commonMapping.properties._kuzzle_info; + } + + await context.target.indices.putMapping({ + body: { + _meta: mappings._meta, + dynamic: mappings.dynamic || false, + properties: mappings.properties, + }, + index: newIndex, + type: context._type, + }); +} + +async function createNewIndex(context, newIndex) { + const exists = await context.target.indices.exists({ index: newIndex }); + + if (exists.body) { + await context.target.indices.delete({ index: newIndex }); + } + + await context.target.indices.create({ index: newIndex }); +} + +async function upgrade(context, index, collection, newIndex) { + const fixedIndexName = fixIndexName(context, index, collection, newIndex); + + await createNewIndex(context, fixedIndexName); + await upgradeMappings(context, index, collection, fixedIndexName); + + return moveData(context, index, collection, fixedIndexName); +} + +async function upgradeInternalStorage(context) { + const config = context.config.services.storageEngine.internalIndex; + const index = `${INTERNAL_PREFIX}${config.name}`; + const mapconfig = config.collections; + const collections = { + config: mapconfig.config, + profiles: mapconfig.profiles, + roles: mapconfig.roles, + users: null, + validations: mapconfig.validations, + }; + + for (const [collection, mappings] of Object.entries(collections)) { + const newIndex = getNewIndexName(index, collection); + let total; + + if (mappings) { + await createNewIndex(context, newIndex); + await context.target.indices.putMapping({ + body: 
mappings, + index: newIndex, + type: context._type, + }); + + total = await moveData( + context, + index, + collection, + newIndex, + collection === "profiles" && transformProfile, + ); + } else { + total = await upgrade(context, index, collection, newIndex); + } + + context.log.ok( + `... migrated internal data: ${collection} (${total} documents)`, + ); + } + + // bootstrap document + await context.target.create({ + body: { version: "2.0.0" }, + id: "internalIndex.dataModelVersion", + index: `${index}.config`, + type: context._type, + }); + + await context.target.create({ + body: { timestamp: Date.now() }, + id: `${config.name}.done`, + index: `${index}.config`, + type: context._type, + }); +} + +async function upgradePluginsStorage(context) { + const { body } = await context.source.cat.indices({ format: "json" }); + const indexes = body + .map((b) => b.index) + .filter((n) => n.startsWith("%plugin:")); + + for (const index of indexes) { + const plugin = index.split(":")[1]; + const newIndexBase = `%plugin-${plugin}${NAME_SEPARATOR}`; + const mappings = await context.source.indices.getMapping({ index }); + const collections = Object.keys(mappings.body[index].mappings); + + for (const collection of collections) { + const newIndex = newIndexBase + collection; + const total = await upgrade(context, index, collection, newIndex); + + context.log.ok( + `... migrated storage for plugin ${plugin}: ${collection} (${total} documents)`, + ); + } + } +} + +async function upgradeAliases(context, upgraded) { + const response = await context.source.indices.getAlias({ + index: Object.keys(upgraded), + }); + + const aliases = {}; + for (const [index, obj] of Object.entries(response.body)) { + if (Object.keys(obj.aliases).length > 0) { + for (const newIndex of upgraded[index].targets) { + aliases[newIndex] = obj.aliases; + } + } + } + + if (Object.keys(aliases).length === 0) { + return; + } + + context.log.notice(` +Index aliases detected. This script can import them to the new structure, but +due to the removal of native collections in Elasticsearch, future aliases will +be duplicated across all of an index upgraded collections.`); + + const choice = await context.inquire.direct({ + default: false, + message: "Upgrade aliases?", + type: "confirm", + }); + + if (!choice) { + return; + } + + for (const [index, obj] of Object.entries(aliases)) { + for (const [name, body] of Object.entries(obj)) { + await context.target.indices.putAlias({ + _type: context._type, + body, + index, + name, + }); + context.log.ok(`...... 
alias ${name} on index ${index} upgraded`); + } + } +} + +async function upgradeDataStorage(context) { + const { body } = await context.source.cat.indices({ format: "json" }); + const upgraded = {}; + let indexes = body + .map((b) => b.index) + .filter((n) => !n.startsWith(INTERNAL_PREFIX)); + + context.log.notice( + `There are ${indexes.length} data indexes that can be upgraded`, + ); + const choices = { + all: "upgrade all indexes", + askCollection: "choose which collections can be upgraded", + askIndex: "choose which indexes can be upgraded", + skip: "skip all data index upgrades", + }; + + const action = await context.inquire.direct({ + choices: Object.values(choices), + default: choices.all, + message: "You want to", + type: "list", + }); + + if (action === choices.skip) { + return; + } + + if (action === choices.askIndex) { + indexes = await context.inquire.direct({ + choices: indexes.map((i) => ({ checked: true, name: i })), + message: "Select the indexes to upgrade:", + type: "checkbox", + }); + } + + for (const index of indexes) { + const mappings = await context.source.indices.getMapping({ index }); + const allCollections = Object.keys(mappings.body[index].mappings); + let collections = allCollections; + + if (action === choices.askCollection) { + context.log.notice(`Starting to upgrade the index ${index}`); + collections = await context.inquire.direct({ + choices: collections.map((c) => ({ checked: true, name: c })), + message: "Select the collections to upgrade:", + type: "checkbox", + }); + } + + upgraded[index] = { + canBeRemoved: collections.length === allCollections.length, + targets: [], + }; + + for (const collection of collections) { + const newIndex = getNewIndexName(index, collection); + const total = await upgrade(context, index, collection, newIndex); + + upgraded[index].targets.push(newIndex); + context.log.ok( + `... migrated data index ${index}: ${collection} (${total} documents)`, + ); + } + } + + await upgradeAliases(context, upgraded); + + return upgraded; +} + +async function destroyPreviousStructure(context, upgraded) { + // there is no point in destroying the previous structure if not performing + // an in-place migration + if (!context.inPlace) { + return; + } + + const { body } = await context.source.cat.indices({ format: "json" }); + const plugins = body + .map((b) => b.index) + .filter((n) => n.startsWith("%plugin:")); + + let indexes = [ + "%kuzzle", + ...plugins, + ...Object.keys(upgraded).filter((i) => upgraded[i].canBeRemoved), + ]; + + context.log.notice( + "Since this is an in-place migration, the previous structure can be removed.", + ); + context.log.notice( + "(only data indexes with ALL their collections upgraded can be deleted)", + ); + + const choices = { + everything: "Yes - remove all upgraded structures", + internal: "Remove only Kuzzle internal data", + kuzzleAndPlugins: "Remove Kuzzle internal data and plugins storages", + no: "No - keep everything as is", + }; + + const action = await context.inquire.direct({ + choices: Object.values(choices), + default: choices[0], + message: "Destroy? 
(THIS CANNOT BE REVERTED)", + type: "list", + }); + + if (action === choices.no) { + context.log.ok("Previous structure left intact."); + return; + } + + if (action === choices.kuzzleAndPlugins) { + indexes = ["%kuzzle", ...plugins]; + } else if (action === choices.internal) { + indexes = ["%kuzzle"]; + } + + await context.source.indices.delete({ index: indexes }); + context.log.ok("Previous structure destroyed."); +} + +module.exports = async function upgradeStorage(context) { + const storageContext = await getESConnector(context); + const targetInfo = await storageContext.target.info(); + const targetMajor = targetInfo.body.version.number.split(".")[0]; + + storageContext._type = + storageContext.inPlace && targetMajor === "5" ? "default" : undefined; + + context.log.notice(` +This script will now start *COPYING* the existing data to the target storage +space. +If the upgrade is interrupted, this script can be replayed any number of times. +Existing data from the older version of Kuzzle will be unaffected, but if +Kuzzle indexes already exist in the target storage space, they will be +overwritten without notice.`); + + const confirm = await context.inquire.direct({ + default: true, + message: "Continue?", + type: "confirm", + }); + + if (!confirm) { + context.log.error("Aborted by user."); + process.exit(0); + } + + try { + await upgradeInternalStorage(storageContext); + await upgradePluginsStorage(storageContext); + const upgraded = await upgradeDataStorage(storageContext); + + storageContext.log.ok("Storage migration complete."); + await destroyPreviousStructure(storageContext, upgraded); + } catch (e) { + storageContext.log.error(`Storage upgrade failure: ${e.message}`); + + const reason = _.get(e, "meta.body.error.reason"); + if (reason) { + storageContext.log.error(`Reason: ${reason}`); + } + + storageContext.log.print(e.stack); + storageContext.log.error("Aborted."); + process.exit(1); + } +}; diff --git a/docker-compose.override.yml b/docker-compose.override.yml new file mode 100644 index 0000000000..52fad5805f --- /dev/null +++ b/docker-compose.override.yml @@ -0,0 +1,23 @@ +version: '3.8' + +services: + elasticsearch: + image: elasticsearch:8.11.3 + container_name: kuzzle_elasticsearch + environment: + - xpack.security.enabled=false + - action.destructive_requires_name=false + - cluster.name=kuzzle + - node.name=alyx + - discovery.type=single-node + - ingest.geoip.downloader.enabled=false + - indices.id_field_data.enabled=true + ports: + - '9200:9200' + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:9200'] + interval: 2s + timeout: 2s + retries: 10 + ulimits: + nofile: 65536 diff --git a/docker-compose.yml b/docker-compose.yml index 09d239c3cd..d7c924c923 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -66,16 +66,7 @@ services: retries: 30 elasticsearch: - image: elasticsearch:8.11.3 - container_name: kuzzle_elasticsearch - environment: - - xpack.security.enabled=false - - action.destructive_requires_name=false - - cluster.name=kuzzle - - node.name=alyx - - discovery.type=single-node - - ingest.geoip.downloader.enabled=false - - indices.id_field_data.enabled=true + image: kuzzleio/elasticsearch:7 ports: - '9200:9200' healthcheck: From 43eeeba05988b73acf57a1c3233db362a31835c8 Mon Sep 17 00:00:00 2001 From: rolljee Date: Thu, 28 Mar 2024 09:39:03 +0100 Subject: [PATCH 43/59] docs(docs): update documentation with a general migration guide --- doc/2/guides/elasticsearch/v8/index.md | 40 +++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 
deletion(-) diff --git a/doc/2/guides/elasticsearch/v8/index.md b/doc/2/guides/elasticsearch/v8/index.md index 47fde9a2e9..078ee9569b 100644 --- a/doc/2/guides/elasticsearch/v8/index.md +++ b/doc/2/guides/elasticsearch/v8/index.md @@ -115,4 +115,42 @@ services: nofile: 65536 ``` -Or you can run `kourou app:scaffold sandbox` to create a new Kuzzle project with a `docker-compose.yml` file that uses Elasticsearch 8. \ No newline at end of file +Or you can run `kourou app:scaffold sandbox` to create a new Kuzzle project with a `docker-compose.yml` file that uses Elasticsearch 8. + +## Migrating to V8 + +Migration Guide from Elasticsearch 7.x to Elasticsearch 8.x + +### Prerequisites + +Before starting the migration process, ensure the following: +* __Backup your data__: Always backup your indices and cluster settings before starting the migration. Use the Snapshot and Restore feature for this. +* __Version Check__: Make sure your Elasticsearch 7.x is at the latest minor version. Elasticsearch supports migrating from the last minor version of the previous major version. + +### Check Deprecation API + +* Elasticsearch Deprecation API can be used to check for any features or settings in your current cluster that are deprecated or removed in the 8.x version. Address these issues before proceeding. +* Test in a Non-Production Environment +Conduct a dry run in a development environment to spot potential issues and estimate the duration the migration process might take. + +### Migration Methods + +1. Re-indexing + * Step 1: Create a new cluster running Elasticsearch 8.x. + * Step 2: Take a snapshot of your data in the current 7.x cluster. + * Step 3: Restore the snapshot into the new 8.x cluster. +1. Rolling Upgrade + * Step 1: Disable Shard Allocation. + * Step 2: Stop and upgrade a single Elasticsearch node. + * Step 3: Enable Shard Allocation and allow the node to join the cluster and the cluster to re-balance. + * Step 4: Repeat for each node in the cluster. +1. Post Upgrade Checks + * Run the health and stats APIs to ensure the health of your newly upgraded cluster. + * Update your clients and integrations to the latest version that's compatible with Elasticsearch 8.x, if not done already. + * Monitor your cluster using the Monitoring API or third-party monitoring services. +1. Troubleshoot + * If you encounter any issues during the migration process, take advantage of the Elasticsearch documentation, forums, and issue trackers for troubleshooting information and support. + +> Note: Migration steps can vary depending on your setup and needs. Always refer to the official Elasticsearch documentation for the most accurate information, you can find it [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html). + +Disclaimer: The above steps provide a general migration guide. Migrations can be complex and it's advised to always test these steps in a non-production environment before applying them to production. Consulting with a database/DevOps expert is also recommended for unique implementations. 
\ No newline at end of file
From 4d411082d595584ff6cc71c88e4af352f7f5c551 Mon Sep 17 00:00:00 2001
From: rolljee
Date: Thu, 28 Mar 2024 09:41:59 +0100
Subject: [PATCH 44/59] docs(documentation): update the migration guide

---
 doc/2/guides/elasticsearch/v8/index.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/2/guides/elasticsearch/v8/index.md b/doc/2/guides/elasticsearch/v8/index.md
index 078ee9569b..844a2cc111 100644
--- a/doc/2/guides/elasticsearch/v8/index.md
+++ b/doc/2/guides/elasticsearch/v8/index.md
@@ -153,4 +153,4 @@ Conduct a dry run in a development environment to spot potential issues and esti
 
 > Note: Migration steps can vary depending on your setup and needs. Always refer to the official Elasticsearch documentation for the most accurate information, you can find it [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html).
 
-Disclaimer: The above steps provide a general migration guide. Migrations can be complex and it's advised to always test these steps in a non-production environment before applying them to production. Consulting with a database/DevOps expert is also recommended for unique implementations.
\ No newline at end of file
+Disclaimer: The above steps provide a general migration guide. Migrations can be complex and it's advised to always test these steps in a non-production environment before applying them to production.

From 7b609e6be831dac67a287fd319bc6a5af25632f1 Mon Sep 17 00:00:00 2001
From: Eric Trousset
Date: Tue, 14 May 2024 12:10:32 +0200
Subject: [PATCH 45/59] Added documentation on how to upgrade project to elasticsearch in dev env

---
 .../migrate-project-from-v7-to-v8/index.md | 183 ++++++++++++++++++
 1 file changed, 183 insertions(+)
 create mode 100644 doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md

diff --git a/doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md b/doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md
new file mode 100644
index 0000000000..431749b667
--- /dev/null
+++ b/doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md
@@ -0,0 +1,183 @@
+---
+code: false
+type: page
+order: 50
+title: Elasticsearch 8 | Develop on Kuzzle | Guide | Core
+meta:
+  - name: description
+    content: Extend Kuzzle API with controllers and actions
+  - name: keywords
+    content: Kuzzle, Documentation, kuzzle write plugins, General purpose backend, iot, backend, opensource, API Controllers
+---
+
+# Migrate Elasticsearch 8
+
+
+
+Kuzzle relies on Elasticsearch as a [NoSQL document store](/core/2/guides/main-concepts/data-storage).
+
+The support of Elasticsearch 8 has been introduced in Kuzzle 2.30.0.
+
+To avoid any breaking changes around the support of Elasticsearch 8, we kept Kuzzle working seamlessly with Elasticsearch 7 and Elasticsearch 8.
+
+The use of Elasticsearch 8 is an **opt-in** option, so no modification is needed on your side if you want to keep using Elasticsearch 7.
+
+The default major version of Elasticsearch will be 7 until the next major version of Kuzzle, which would be Kuzzle v3.
+
+# How to set up your project to use Elasticsearch 8
+
+A new configuration key has been introduced to select the Elasticsearch version you want to support for your project.
+When not specified, it will be considered as version 7.
+Specify 8 if you want to switch Kuzzle to support Elasticsearch 8.
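The next paragraph mentions that this setting can also come from an environment variable. As a minimal sketch only — the exact variable name is an assumption derived from the `kuzzle_services__…` double-underscore convention used in the docker-compose examples below, not something confirmed by the docs — it could be passed to the Kuzzle container like this:

```yaml
services:
  node:
    environment:
      # assumed variable name, following the kuzzle_services__* convention
      # used elsewhere in this patch series for kuzzlerc overrides
      - kuzzle_services__storageEngine__majorVersion=8
```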
+ +This has to be add to you kuzzlerc file, or provided via an environnement variable (see RC doc for details on kuzzlerc configation options) + +```json +{ + "services": { + "storageEngine": { + "majorVersion": 8 + } + } +} +``` + +:::warning +You can not set the `majorVersion` key to 8 if you are using a version of Kuzzle that does not support it. +::: + +:::info +Kuzzle cannot connect to both Elasticsearch 7 and Elasticsearch 8 at the same time. +::: + +Once the version is set to 8, Kuzzle will use the Elasticsearch 8 API to communicate with the database. + +Next you will have to change the docker-compose.yml file so that it pulls Elasticsearch 8 image with the recommanded confiuration to work with Kuzzle: + +```yaml + elasticsearch: + image: elasticsearch:8.11.3 + container_name: kuzzle_elasticsearch + environment: + - xpack.security.enabled=false + - action.destructive_requires_name=false + - cluster.name=kuzzle + - node.name=alyx + - discovery.type=single-node + - ingest.geoip.downloader.enabled=false + - indices.id_field_data.enabled=true + ports: + - '9200:9200' + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:9200'] + interval: 2s + timeout: 2s + retries: 10 + ulimits: + nofile: 65536 +``` + +You will find below an example of a `docker-compose.yml` file to run Kuzzle with Elasticsearch 8. + +```yaml +version: '3.8' + +services: + node: + image: kuzzleio/kuzzle:2 + depends_on: + redis: + condition: service_healthy + elasticsearch: + condition: service_healthy + ports: + - "7512:7512" + - "7511:7511" + - "7510:7510" + - "9229:9229" + - "1883:1883" + environment: + - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200 + - kuzzle_services__storageEngine__commonMapping__dynamic=true + - kuzzle_services__internalCache__node__host=redis + - kuzzle_services__memoryStorage__node__host=redis + - NODE_ENV=${NODE_ENV:-development} + - DEBUG=${DEBUG:-none} + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:7512/_healthcheck"] + timeout: 10s + interval: 10s + retries: 30 + start_period: 1m + + redis: + image: redis:6 + ports: + - '6379:6379' + healthcheck: + test: ['CMD', 'redis-cli', 'ping'] + interval: 1s + timeout: 3s + retries: 30 + + elasticsearch: + image: elasticsearch:8.11.3 + container_name: kuzzle_elasticsearch + environment: + - xpack.security.enabled=false + - action.destructive_requires_name=false + - cluster.name=kuzzle + - node.name=alyx + - discovery.type=single-node + - ingest.geoip.downloader.enabled=false + - indices.id_field_data.enabled=true + ports: + - '9200:9200' + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:9200'] + interval: 2s + timeout: 2s + retries: 10 + ulimits: + nofile: 65536 +``` + +Or you can run `kourou app:scaffold sandbox` to create a new Kuzzle project with a `docker-compose.yml` file that uses Elasticsearch 8. + +## Migrating to V8 + +Migration Guide from Elasticsearch 7.x to Elasticsearch 8.x + +### Prerequisites + +Before starting the migration process, ensure the following: +* __Backup your data__: Always backup your indices and cluster settings before starting the migration. Use the Snapshot and Restore feature for this. +* __Version Check__: Make sure your Elasticsearch 7.x is at the latest minor version. Elasticsearch supports migrating from the last minor version of the previous major version. + +### Check Deprecation API + +* Elasticsearch Deprecation API can be used to check for any features or settings in your current cluster that are deprecated or removed in the 8.x version. 
Address these issues before proceeding.
+* Test in a Non-Production Environment
+Conduct a dry run in a development environment to spot potential issues and estimate the duration the migration process might take.
+
+### Migration Methods
+
+1. Re-indexing
+   * Step 1: Create a new cluster running Elasticsearch 8.x.
+   * Step 2: Take a snapshot of your data in the current 7.x cluster.
+   * Step 3: Restore the snapshot into the new 8.x cluster.
+1. Rolling Upgrade
+   * Step 1: Disable Shard Allocation.
+   * Step 2: Stop and upgrade a single Elasticsearch node.
+   * Step 3: Enable Shard Allocation and allow the node to join the cluster and the cluster to re-balance.
+   * Step 4: Repeat for each node in the cluster.
+1. Post Upgrade Checks
+   * Run the health and stats APIs to ensure the health of your newly upgraded cluster.
+   * Update your clients and integrations to the latest version that's compatible with Elasticsearch 8.x, if not done already.
+   * Monitor your cluster using the Monitoring API or third-party monitoring services.
+1. Troubleshoot
+   * If you encounter any issues during the migration process, take advantage of the Elasticsearch documentation, forums, and issue trackers for troubleshooting information and support.
+
+> Note: Migration steps can vary depending on your setup and needs. Always refer to the official Elasticsearch documentation for the most accurate information, you can find it [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html).
+
+Disclaimer: The above steps provide a general migration guide. Migrations can be complex and it's advised to always test these steps in a non-production environment before applying them to production.

From f0871603950f8ca5d09a8423f2cd3859d7a30c50 Mon Sep 17 00:00:00 2001
From: Eric Trousset
Date: Tue, 14 May 2024 12:13:46 +0200
Subject: [PATCH 46/59] Added more documentation on how to upgrade project to elasticsearch in dev env

---
 .../migrate-project-from-v7-to-v8/index.md | 127 +++---------------
 doc/2/guides/elasticsearch/v8/index.md     |  13 +-
 2 files changed, 27 insertions(+), 113 deletions(-)

diff --git a/doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md b/doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md
index 431749b667..5a2640450f 100644
--- a/doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md
+++ b/doc/2/guides/elasticsearch/migrate-project-from-v7-to-v8/index.md
@@ -10,7 +10,7 @@ meta:
     content: Kuzzle, Documentation, kuzzle write plugins, General purpose backend, iot, backend, opensource, API Controllers
 ---
 
-# Migrate Elasticsearch 8
+# Migrate a project from Elasticsearch 7 to Elasticsearch 8
 
 
 
@@ -24,11 +24,17 @@ The use of Elasticsearch 8 is an **opt-in** option, so no modification is needed
 
 The default major version of Elasticsearch will be 7 until the next major version of Kuzzle, which would be Kuzzle v3.
 
-# How to set up your project to use Elasticsearch 8
+## How to set up your project to use Elasticsearch 8
 
-A new configuration key has been introduced to select the Elasticsearch version you want to support for your project.
-When not specified, it will be considered as version 7.
-Specify 8 if you want to switch Kuzzle to support Elasticsearch 8.
+### Set up Kuzzle to use Elasticsearch 8
+
+#### Upgrade the npm package
+First, you need to upgrade your Kuzzle package to version `>= 2.30.0-es8` in the `package.json` file. Then run `npm install` to upgrade the packages for your application.
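As a sketch of the dependency bump described above — the pinned version shown here is the `2.31.0-elasticsearch-8.1` pre-release produced later in this patch series, used purely as an illustration; the exact version to target depends on the release you want — the `package.json` entry could look like:

```json
{
  "dependencies": {
    "kuzzle": "2.31.0-elasticsearch-8.1"
  }
}
```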
+
+### Configure Kuzzle
+A new configuration key `majorVersion` has been introduced in the `storageEngine` section to allow the selection of the Elasticsearch version you want to support for your project.
+
+When not specified, it will be considered to be version 7; specify 8 if you want to switch Kuzzle to support Elasticsearch 8.
 
 This has to be added to your kuzzlerc file, or provided via an environment variable (see the RC documentation for details on kuzzlerc configuration options)
 
 ```json
 {
   "services": {
     "storageEngine": {
       "majorVersion": 8
     }
   }
 }
 ```
 
 :::warning
-You can not set the `majorVersion` key to 8 if you are using a version of Kuzzle that does not support it.
+You cannot set the `majorVersion` key to 8 if you are using a version of Kuzzle that does not support it (older versions of Kuzzle won't complain about this value).
 :::
 
 :::info
 Kuzzle cannot connect to both Elasticsearch 7 and Elasticsearch 8 at the same time.
 :::
 
 Once the version is set to 8, Kuzzle will use the Elasticsearch 8 API to communicate with the database.
 
-Next you will have to change the docker-compose.yml file so that it pulls Elasticsearch 8 image with the recommanded confiuration to work with Kuzzle:
+### Launch Elasticsearch 8 (dev environment)
+
+Next, you will have to change the docker-compose.yml file so that it pulls the Elasticsearch 8 image with the recommended configuration to work with Kuzzle.
+
+You can replace the original `elasticsearch` section with the following example:
 
 ```yaml
-  elasticsearch:
+  elasticsearch:
     image: elasticsearch:8.11.3
     container_name: kuzzle_elasticsearch
     environment:
       - xpack.security.enabled=false
       - action.destructive_requires_name=false
       - cluster.name=kuzzle
       - node.name=alyx
       - discovery.type=single-node
       - ingest.geoip.downloader.enabled=false
       - indices.id_field_data.enabled=true
     ports:
       - '9200:9200'
     healthcheck:
       test: ['CMD', 'curl', '-f', 'http://localhost:9200']
       interval: 2s
       timeout: 2s
       retries: 10
     ulimits:
       nofile: 65536
 ```
 
-You will find below an example of a `docker-compose.yml` file to run Kuzzle with Elasticsearch 8.
-
-```yaml
-version: '3.8'
-
-services:
-  node:
-    image: kuzzleio/kuzzle:2
-    depends_on:
-      redis:
-        condition: service_healthy
-      elasticsearch:
-        condition: service_healthy
-    ports:
-      - "7512:7512"
-      - "7511:7511"
-      - "7510:7510"
-      - "9229:9229"
-      - "1883:1883"
-    environment:
-      - kuzzle_services__storageEngine__client__node=http://elasticsearch:9200
-      - kuzzle_services__storageEngine__commonMapping__dynamic=true
-      - kuzzle_services__internalCache__node__host=redis
-      - kuzzle_services__memoryStorage__node__host=redis
-      - NODE_ENV=${NODE_ENV:-development}
-      - DEBUG=${DEBUG:-none}
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:7512/_healthcheck"]
-      timeout: 10s
-      interval: 10s
-      retries: 30
-      start_period: 1m
-
-  redis:
-    image: redis:6
-    ports:
-      - '6379:6379'
-    healthcheck:
-      test: ['CMD', 'redis-cli', 'ping']
-      interval: 1s
-      timeout: 3s
-      retries: 30
-
-  elasticsearch:
-    image: elasticsearch:8.11.3
-    container_name: kuzzle_elasticsearch
-    environment:
-      - xpack.security.enabled=false
-      - action.destructive_requires_name=false
-      - cluster.name=kuzzle
-      - node.name=alyx
-      - discovery.type=single-node
-      - ingest.geoip.downloader.enabled=false
-      - indices.id_field_data.enabled=true
-    ports:
-      - '9200:9200'
-    healthcheck:
-      test: ['CMD', 'curl', '-f', 'http://localhost:9200']
-      interval: 2s
-      timeout: 2s
-      retries: 10
-    ulimits:
-      nofile: 65536
-```
-
-Or you can run `kourou app:scaffold sandbox` to create a new Kuzzle project with a `docker-compose.yml` file that uses Elasticsearch 8.
-
-## Migrating to V8
-
-Migration Guide from Elasticsearch 7.x to Elasticsearch 8.x
-
-### Prerequisites
-
-Before starting the migration process, ensure the following:
-* __Backup your data__: Always backup your indices and cluster settings before starting the migration. Use the Snapshot and Restore feature for this.
-* __Version Check__: Make sure your Elasticsearch 7.x is at the latest minor version. Elasticsearch supports migrating from the last minor version of the previous major version.
-
-### Check Deprecation API
-
-* Elasticsearch Deprecation API can be used to check for any features or settings in your current cluster that are deprecated or removed in the 8.x version. Address these issues before proceeding.
-* Test in a Non-Production Environment
-Conduct a dry run in a development environment to spot potential issues and estimate the duration the migration process might take.
-
-### Migration Methods
-
-1. Re-indexing
-   * Step 1: Create a new cluster running Elasticsearch 8.x.
-   * Step 2: Take a snapshot of your data in the current 7.x cluster.
-   * Step 3: Restore the snapshot into the new 8.x cluster.
-1. Rolling Upgrade
-   * Step 1: Disable Shard Allocation.
-   * Step 2: Stop and upgrade a single Elasticsearch node.
-   * Step 3: Enable Shard Allocation and allow the node to join the cluster and the cluster to re-balance.
-   * Step 4: Repeat for each node in the cluster.
-1. Post Upgrade Checks
-   * Run the health and stats APIs to ensure the health of your newly upgraded cluster.
-   * Update your clients and integrations to the latest version that's compatible with Elasticsearch 8.x, if not done already.
-   * Monitor your cluster using the Monitoring API or third-party monitoring services.
-1. Troubleshoot
-   * If you encounter any issues during the migration process, take advantage of the Elasticsearch documentation, forums, and issue trackers for troubleshooting information and support.
-
-> Note: Migration steps can vary depending on your setup and needs. Always refer to the official Elasticsearch documentation for the most accurate information, you can find it [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html).
-
-Disclaimer: The above steps provide a general migration guide. Migrations can be complex and it's advised to always test these steps in a non-production environment before applying them to production.
+### Data migration
+
+In a development environment, you can run your usual initialisation scripts, or use Kourou to dump the data from the project still running on Elasticsearch 7 and import it once you are done setting up the project to run with Elasticsearch 8.
+
+In a hosted environment such as pre-production or production, we recommend following this guide.
diff --git a/doc/2/guides/elasticsearch/v8/index.md b/doc/2/guides/elasticsearch/v8/index.md
index 844a2cc111..8f145f518c 100644
--- a/doc/2/guides/elasticsearch/v8/index.md
+++ b/doc/2/guides/elasticsearch/v8/index.md
@@ -117,9 +117,9 @@ services:
 
 Or you can run `kourou app:scaffold sandbox` to create a new Kuzzle project with a `docker-compose.yml` file that uses Elasticsearch 8.
 
-## Migrating to V8
+## Migrating production data from Elasticsearch 7 to 8
 
-Migration Guide from Elasticsearch 7.x to Elasticsearch 8.x
+In this section, we will see how to migrate production data from Elasticsearch 7.x to Elasticsearch 8.x.
 
 ### Prerequisites
 
@@ -130,11 +130,12 @@ Before starting the migration process, ensure the following:
 ### Check Deprecation API
 
 * Elasticsearch Deprecation API can be used to check for any features or settings in your current cluster that are deprecated or removed in the 8.x version. Address these issues before proceeding.
-* Test in a Non-Production Environment
+* Test in a **Non-Production Environment**
 Conduct a dry run in a development environment to spot potential issues and estimate the duration the migration process might take.
 
 ### Migration Methods
 
+There are two strategies to upgrade Elasticsearch in a production environment:
 1. Re-indexing
    * Step 1: Create a new cluster running Elasticsearch 8.x.
    * Step 2: Take a snapshot of your data in the current 7.x cluster.
    * Step 3: Restore the snapshot into the new 8.x cluster.
 1. Rolling Upgrade
    * Step 1: Disable Shard Allocation.
    * Step 2: Stop and upgrade a single Elasticsearch node.
    * Step 3: Enable Shard Allocation and allow the node to join the cluster and the cluster to re-balance.
    * Step 4: Repeat for each node in the cluster.
+
+After you have migrated your data:
 1. Post Upgrade Checks
    * Run the health and stats APIs to ensure the health of your newly upgraded cluster.
    * Update your clients and integrations to the latest version that's compatible with Elasticsearch 8.x, if not done already.
    * Monitor your cluster using the Monitoring API or third-party monitoring services.
 1. Troubleshoot
-   * If you encounter any issues during the migration process, take advantage of the Elasticsearch documentation, forums, and issue trackers for troubleshooting information and support.
+   * If you encounter any issues during the migration process, take advantage of the Elasticsearch documentation, forums, and issue trackers for troubleshooting information and support.
 
 > Note: Migration steps can vary depending on your setup and needs. Always refer to the official Elasticsearch documentation for the most accurate information, you can find it [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html).
 
-Disclaimer: The above steps provide a general migration guide. Migrations can be complex and it's advised to always test these steps in a non-production environment before applying them to production.
+Disclaimer: The above steps provide a general migration guide. Migrations can be complex and it's advised to always test these steps in a **non-production environment** before applying them to production.
From e5e26836be904ced16040470bafd68329d6f9ade Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 14 May 2024 15:38:20 +0200 Subject: [PATCH 47/59] chore(import): fixing import issues --- lib/core/plugin/pluginContext.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/core/plugin/pluginContext.ts b/lib/core/plugin/pluginContext.ts index d5b1000684..91d714bbe3 100644 --- a/lib/core/plugin/pluginContext.ts +++ b/lib/core/plugin/pluginContext.ts @@ -23,13 +23,13 @@ import Bluebird from "bluebird"; import { Koncorde } from "../shared/KoncordeWrapper"; import { JSONObject } from "kuzzle-sdk"; - import { KuzzleRequest, Request, RequestContext, RequestInput, } from "../../../index"; + import * as kerror from "../../kerror"; import { BadRequestError, @@ -47,7 +47,7 @@ import { TooManyRequestsError, UnauthorizedError, } from "../../kerror/errors"; -import { Elasticsearch } from "../../service/storage/elasticsearch"; +import { Elasticsearch } from "../../service/storage/Elasticsearch"; import { Mutex } from "../../util/mutex"; import Promback from "../../util/promback"; import { isPlainObject } from "../../util/safeObject"; From 50f2cac418f1616ec1c9c68ef149f8072a7a45e4 Mon Sep 17 00:00:00 2001 From: rolljee Date: Tue, 14 May 2024 18:47:44 +0200 Subject: [PATCH 48/59] fix(tests): fixing unit tests --- .gitignore | 5 +++++ test/mocks/service/elasticsearchClient.mock.js | 10 +++++----- test/service/storage/elasticsearch-8.test.js | 8 ++++---- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/.gitignore b/.gitignore index 043c764104..c0bdde1c55 100644 --- a/.gitignore +++ b/.gitignore @@ -104,6 +104,7 @@ lib/api/request/requestContext.js lib/api/request/requestInput.js lib/api/request/requestResponse.js lib/cluster/idCardHandler.js +lib/core/cache/cacheDbEnum.js lib/cluster/state.js lib/cluster/workers/IDCardRenewer.js lib/config/default.config.js @@ -138,6 +139,10 @@ lib/core/shared/KoncordeWrapper.js lib/core/shared/repository.js lib/core/shared/sdk/embeddedSdk.js lib/core/shared/sdk/funnelProtocol.js +lib/core/shared/ObjectRepository.js +lib/core/shared/store.js +lib/core/storage/storeScopeEnum.js +lib/types/shared/StoreCollectionsDefinition.js lib/core/storage/indexCache.js lib/kerror/errors/*.js lib/kerror/errors/badRequestError.js diff --git a/test/mocks/service/elasticsearchClient.mock.js b/test/mocks/service/elasticsearchClient.mock.js index 1066ffce94..d3eb5beeb4 100644 --- a/test/mocks/service/elasticsearchClient.mock.js +++ b/test/mocks/service/elasticsearchClient.mock.js @@ -41,20 +41,20 @@ class ElasticsearchClientMock { }; this.indices = { - open: sinon.stub().resolves(), close: sinon.stub().resolves(), - putSettings: sinon.stub().resolves(), - getSettings: sinon.stub().resolves(), create: sinon.stub().resolves(), delete: sinon.stub().resolves(), exists: sinon.stub().resolves(), existsType: sinon.stub().resolves(), + get: sinon.stub().resolves(), + getAlias: sinon.stub().resolves(), getMapping: sinon.stub().resolves(), + getSettings: sinon.stub().resolves(), + open: sinon.stub().resolves(), putMapping: sinon.stub().resolves(), + putSettings: sinon.stub().resolves(), refresh: sinon.stub().resolves(), stats: sinon.stub().resolves(), - get: sinon.stub().resolves(), - getAlias: sinon.stub().resolves(), updateAliases: sinon.stub().resolves(), }; diff --git a/test/service/storage/elasticsearch-8.test.js b/test/service/storage/elasticsearch-8.test.js index 8564a44e1b..3f9146dc01 100644 --- a/test/service/storage/elasticsearch-8.test.js +++ 
b/test/service/storage/elasticsearch-8.test.js @@ -14,7 +14,7 @@ const { const KuzzleMock = require("../../mocks/kuzzle.mock"); const ESClientMock = require("../../mocks/service/elasticsearchClient.mock"); -const scopeEnum = require("../../../lib/core/storage/storeScopeEnum"); +const { storeScopeEnum } = require("../../../lib/core/storage/storeScopeEnum"); const { Mutex } = require("../../../lib/util/mutex"); describe("Test: ElasticSearch service", () => { @@ -74,11 +74,11 @@ describe("Test: ElasticSearch service", () => { it("should initialize properties", () => { const esInternal = new ES( kuzzle.config.services.storageEngine, - scopeEnum.PRIVATE, + storeScopeEnum.PRIVATE, ); sinon.stub(esInternal.client, "waitForElasticsearch").resolves(); - esInternal.client._client = new ESClientMock("7.0.0"); + esInternal.client._client = new ESClientMock("8.0.0"); should(elasticsearch.config).be.exactly( kuzzle.config.services.storageEngine, @@ -5294,7 +5294,7 @@ describe("Test: ElasticSearch service", () => { publicES = new ES(kuzzle.config.services.storageEngine); internalES = new ES( kuzzle.config.services.storageEngine, - scopeEnum.PRIVATE, + storeScopeEnum.PRIVATE, ); sinon.stub(publicES.client, "waitForElasticsearch").resolves(); From 8286522368ee70efd54a6d631a5917f02d96b952 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 15 May 2024 10:33:53 +0200 Subject: [PATCH 49/59] fix(es8): update deployment to allow elasticsearch-8 package to be deployed as a pre-release --- .github/workflows/workflow-deployments.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workflow-deployments.yml b/.github/workflows/workflow-deployments.yml index e212299f49..b70e780fce 100644 --- a/.github/workflows/workflow-deployments.yml +++ b/.github/workflows/workflow-deployments.yml @@ -84,7 +84,7 @@ jobs: npm-deploy: name: Build and deploy release on NPM.js - if: ${{ (github.event_name != 'workflow_dispatch' || inputs.npm_deploy) && (github.ref_name == 'master' || github.ref_name == 'beta') }} + if: ${{ (github.event_name != 'workflow_dispatch' || inputs.npm_deploy) && (github.ref_name == 'master' || github.ref_name == 'beta' || github.ref_name == 'elasticsearch-8') }} runs-on: ubuntu-22.04 permissions: contents: write From e59cedd2a2404e315024f18eb3823af03e341411 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 15 May 2024 10:35:47 +0200 Subject: [PATCH 50/59] fix(es8): add elasticsearch-8 in listened branches --- .github/workflows/workflow.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml index 94255a978a..156da075e7 100644 --- a/.github/workflows/workflow.yaml +++ b/.github/workflows/workflow.yaml @@ -5,6 +5,7 @@ on: branches: - master - beta + - elasticsearch-8 pull_request: env: From 2fc9cc2bcb50bdcc629820074267b8085d466434 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Wed, 15 May 2024 08:54:01 +0000 Subject: [PATCH 51/59] chore(release): 2.31.0-elasticsearch-8.1 [skip ci] # [2.31.0-elasticsearch-8.1](https://github.com/kuzzleio/kuzzle/compare/v2.30.0...v2.31.0-elasticsearch-8.1) (2024-05-15) ### Bug Fixes * **conflicts:** merge conflict ([e763392](https://github.com/kuzzleio/kuzzle/commit/e76339261029262aac31af972dc81f05a082e469)) * **es8:** add elasticsearch-8 in listened branches ([e59cedd](https://github.com/kuzzleio/kuzzle/commit/e59cedd2a2404e315024f18eb3823af03e341411)) * **es8:** update deployment to allow elasticsearch-8 package to be deployed as a pre-release 
([8286522](https://github.com/kuzzleio/kuzzle/commit/8286522368ee70efd54a6d631a5917f02d96b952)) * **storage:** update more types ([50bfe58](https://github.com/kuzzleio/kuzzle/commit/50bfe580db9e86d6e1de761605ac86debdae2e48)) * **tests:** fixing unit tests ([50f2cac](https://github.com/kuzzleio/kuzzle/commit/50f2cac418f1616ec1c9c68ef149f8072a7a45e4)) ### Features * **elasticsearch8:** tests unit are now okay ([1f4d1e8](https://github.com/kuzzleio/kuzzle/commit/1f4d1e8686c3f460496f8a73d983371a173d3a14)) * **es8:** elasticsearch 8 unit tests & functional tests running ([bdcce96](https://github.com/kuzzleio/kuzzle/commit/bdcce96fc97ff58143f29484ae1da2076ee2e885)) * **es8:** support both es7 and es8 ([e12c35a](https://github.com/kuzzleio/kuzzle/commit/e12c35af6b3a8d375005177cdf56509396db5cb4)) * only support ES 8.x ([4a8038e](https://github.com/kuzzleio/kuzzle/commit/4a8038e7729a9cdf5b9d7b2c0540899f8911d11c)) * **storage:** add more types, fix some requests ([a18d454](https://github.com/kuzzleio/kuzzle/commit/a18d454b36d5fe565ad6b08a772c13c3e6a16bab)) * **storage:** upgrade to the Elasticsearch 8 client ([6753640](https://github.com/kuzzleio/kuzzle/commit/675364013e3b07fc665bfea70e2489b28bad0d8e)) --- CHANGELOG.md | 21 +++++++++++++++++++++ package-lock.json | 4 ++-- package.json | 2 +- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e03357e979..64f6d83f34 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,24 @@ +# [2.31.0-elasticsearch-8.1](https://github.com/kuzzleio/kuzzle/compare/v2.30.0...v2.31.0-elasticsearch-8.1) (2024-05-15) + + +### Bug Fixes + +* **conflicts:** merge conflict ([e763392](https://github.com/kuzzleio/kuzzle/commit/e76339261029262aac31af972dc81f05a082e469)) +* **es8:** add elasticsearch-8 in listened branches ([e59cedd](https://github.com/kuzzleio/kuzzle/commit/e59cedd2a2404e315024f18eb3823af03e341411)) +* **es8:** update deployment to allow elasticsearch-8 package to be deployed as a pre-release ([8286522](https://github.com/kuzzleio/kuzzle/commit/8286522368ee70efd54a6d631a5917f02d96b952)) +* **storage:** update more types ([50bfe58](https://github.com/kuzzleio/kuzzle/commit/50bfe580db9e86d6e1de761605ac86debdae2e48)) +* **tests:** fixing unit tests ([50f2cac](https://github.com/kuzzleio/kuzzle/commit/50f2cac418f1616ec1c9c68ef149f8072a7a45e4)) + + +### Features + +* **elasticsearch8:** tests unit are now okay ([1f4d1e8](https://github.com/kuzzleio/kuzzle/commit/1f4d1e8686c3f460496f8a73d983371a173d3a14)) +* **es8:** elasticsearch 8 unit tests & functional tests running ([bdcce96](https://github.com/kuzzleio/kuzzle/commit/bdcce96fc97ff58143f29484ae1da2076ee2e885)) +* **es8:** support both es7 and es8 ([e12c35a](https://github.com/kuzzleio/kuzzle/commit/e12c35af6b3a8d375005177cdf56509396db5cb4)) +* only support ES 8.x ([4a8038e](https://github.com/kuzzleio/kuzzle/commit/4a8038e7729a9cdf5b9d7b2c0540899f8911d11c)) +* **storage:** add more types, fix some requests ([a18d454](https://github.com/kuzzleio/kuzzle/commit/a18d454b36d5fe565ad6b08a772c13c3e6a16bab)) +* **storage:** upgrade to the Elasticsearch 8 client ([6753640](https://github.com/kuzzleio/kuzzle/commit/675364013e3b07fc665bfea70e2489b28bad0d8e)) + # [2.30.0](https://github.com/kuzzleio/kuzzle/compare/v2.29.1...v2.30.0) (2024-05-07) diff --git a/package-lock.json b/package-lock.json index e89ad23ef7..a46ccf8625 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "kuzzle", - "version": "2.30.0", + "version": "2.31.0-elasticsearch-8.1", 
"lockfileVersion": 3, "requires": true, "packages": { "": { "name": "kuzzle", - "version": "2.30.0", + "version": "2.31.0-elasticsearch-8.1", "license": "Apache-2.0", "dependencies": { "aedes": "0.46.3", diff --git a/package.json b/package.json index c8eeaf3ef6..0e9da2d7b2 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "kuzzle", "author": "The Kuzzle Team ", - "version": "2.30.0", + "version": "2.31.0-elasticsearch-8.1", "description": "Kuzzle is an open-source solution that handles all the data management through a secured API, with a large choice of protocols.", "bin": "bin/start-kuzzle-server", "scripts": { From d3263d1917d94d2dc99baf5acceb654e0c3cd6d8 Mon Sep 17 00:00:00 2001 From: Cyril Nguyen Date: Fri, 21 Jun 2024 16:27:00 +0200 Subject: [PATCH 52/59] docs(log): add a log to know which ES configuration is set up --- lib/service/storage/Elasticsearch.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/service/storage/Elasticsearch.ts b/lib/service/storage/Elasticsearch.ts index 2bb31e299b..f83d713802 100644 --- a/lib/service/storage/Elasticsearch.ts +++ b/lib/service/storage/Elasticsearch.ts @@ -13,6 +13,8 @@ export class Elasticsearch extends Service { constructor(config: any, scope = storeScopeEnum.PUBLIC) { super("elasticsearch", config); + global.kuzzle.log.info(`[ℹ] Elasticsearch configuration is set to major version : ${config.majorVersion}`); + if (config.majorVersion === "7") { this.client = new ES7(config, scope); } else if (config.majorVersion === "8") { From f849e2c9d2aff5a3636a0b2fc0d11ac86561277a Mon Sep 17 00:00:00 2001 From: Cyril Nguyen Date: Mon, 24 Jun 2024 10:18:20 +0200 Subject: [PATCH 53/59] fix(ci): indent to pass linter --- lib/service/storage/Elasticsearch.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/service/storage/Elasticsearch.ts b/lib/service/storage/Elasticsearch.ts index f83d713802..546cce9c60 100644 --- a/lib/service/storage/Elasticsearch.ts +++ b/lib/service/storage/Elasticsearch.ts @@ -13,7 +13,9 @@ export class Elasticsearch extends Service { constructor(config: any, scope = storeScopeEnum.PUBLIC) { super("elasticsearch", config); - global.kuzzle.log.info(`[ℹ] Elasticsearch configuration is set to major version : ${config.majorVersion}`); + global.kuzzle.log.info( + `[ℹ] Elasticsearch configuration is set to major version : ${config.majorVersion}`, + ); if (config.majorVersion === "7") { this.client = new ES7(config, scope); From f651632879ea68e34957b809b75a5f356bca92e0 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Mon, 24 Jun 2024 09:49:10 +0000 Subject: [PATCH 54/59] chore(release): 2.31.0-elasticsearch-8.2 [skip ci] # [2.31.0-elasticsearch-8.2](https://github.com/kuzzleio/kuzzle/compare/v2.31.0-elasticsearch-8.1...v2.31.0-elasticsearch-8.2) (2024-06-24) ### Bug Fixes * **ci:** indent to pass linter ([f849e2c](https://github.com/kuzzleio/kuzzle/commit/f849e2c9d2aff5a3636a0b2fc0d11ac86561277a)) --- CHANGELOG.md | 7 +++++++ package-lock.json | 4 ++-- package.json | 2 +- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64f6d83f34..7c29952093 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,10 @@ +# [2.31.0-elasticsearch-8.2](https://github.com/kuzzleio/kuzzle/compare/v2.31.0-elasticsearch-8.1...v2.31.0-elasticsearch-8.2) (2024-06-24) + + +### Bug Fixes + +* **ci:** indent to pass linter ([f849e2c](https://github.com/kuzzleio/kuzzle/commit/f849e2c9d2aff5a3636a0b2fc0d11ac86561277a)) + # 
[2.31.0-elasticsearch-8.1](https://github.com/kuzzleio/kuzzle/compare/v2.30.0...v2.31.0-elasticsearch-8.1) (2024-05-15) diff --git a/package-lock.json b/package-lock.json index a46ccf8625..7598786925 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "kuzzle", - "version": "2.31.0-elasticsearch-8.1", + "version": "2.31.0-elasticsearch-8.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "kuzzle", - "version": "2.31.0-elasticsearch-8.1", + "version": "2.31.0-elasticsearch-8.2", "license": "Apache-2.0", "dependencies": { "aedes": "0.46.3", diff --git a/package.json b/package.json index 0e9da2d7b2..6b0063c893 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "kuzzle", "author": "The Kuzzle Team ", - "version": "2.31.0-elasticsearch-8.1", + "version": "2.31.0-elasticsearch-8.2", "description": "Kuzzle is an open-source solution that handles all the data management through a secured API, with a large choice of protocols.", "bin": "bin/start-kuzzle-server", "scripts": { From 4943d5420dfa3e747b872582fc029b2ccd919f23 Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 28 Aug 2024 10:24:28 +0200 Subject: [PATCH 55/59] chore(linting): linting the app --- lib/core/shared/repository.js | 401 ---------------------------------- test/.eslintrc.json | 3 +- 2 files changed, 2 insertions(+), 402 deletions(-) delete mode 100644 lib/core/shared/repository.js diff --git a/lib/core/shared/repository.js b/lib/core/shared/repository.js deleted file mode 100644 index cda8932b42..0000000000 --- a/lib/core/shared/repository.js +++ /dev/null @@ -1,401 +0,0 @@ -"use strict"; -/* - * Kuzzle, a backend software, self-hostable and ready to use - * to power modern apps - * - * Copyright 2015-2022 Kuzzle - * mailto: support AT kuzzle.io - * website: http://kuzzle.io - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Repository = void 0; -const cacheDbEnum_1 = __importDefault(require("../cache/cacheDbEnum")); -const kerror = __importStar(require("../../kerror")); -class Repository { - constructor({ cache = cacheDbEnum_1.default.INTERNAL, store = null } = {}) { - this.ttl = global.kuzzle.config.repositories.common.cacheTTL; - this.collection = null; - this.ObjectConstructor = null; - this.store = store; - this.index = store ? store.index : global.kuzzle.internalIndex.index; - this.cacheDb = cache; - } - async loadOneFromDatabase(id) { - let response; - try { - response = await this.store.get(this.collection, id); - } - catch (error) { - if (error.status === 404) { - throw kerror.get("services", "storage", "not_found", id); - } - throw error; - } - if (response._id) { - const dto = {}; - if (response._source) { - Object.assign(dto, response._source, { _id: response._id }); - } - else { - Object.assign(dto, response); - } - return this.fromDTO(dto); - } - return null; - } - async loadMultiFromDatabase(ids) { - const { items } = await this.store.mGet(this.collection, ids); - if (items.length === 0) { - return []; - } - const promises = []; - for (const item of items) { - promises.push(this.fromDTO({ - ...item._source, - _id: item._id, - })); - } - const objects = await Promise.all(promises); - return objects; - } - /** - * Search in database corresponding repository according to a query - * - * @param {object} searchBody - * @param {object} [options] - optional search arguments (from, size, scroll) - * @returns {Promise} - */ - async search(searchBody, options = {}) { - const response = await this.store.search(this.collection, searchBody, options); - return this.formatSearchResults(response); - } - /** - * Scroll over a paginated search request - */ - async scroll(scrollId, ttl) { - const response = await this.store.scroll(scrollId, ttl); - return this.formatSearchResults(response); - } - /** - * Loads an object from Cache. Returns a promise that resolves either to the - * retrieved object of null in case it is not found. - * - * The opts object currently accepts one optional parameter: key, which forces - * the cache key to fetch. - * In case the key is not provided, it defaults to repos///, i.e.: repos/%kuzzle/users/12 - * - * @param id - The id of the object to get - * @param options.key - Cache key. - */ - async loadFromCache(id, options = {}) { - const key = options.key || this.getCacheKey(id); - let response; - try { - response = await global.kuzzle.ask(`core:cache:${this.cacheDb}:get`, key); - if (response === null) { - return null; - } - return await this.fromDTO(Object.assign({}, JSON.parse(response))); - } - catch (err) { - throw kerror.get("services", "cache", "read_failed", err.message); - } - } - /** - * Loads an object from Cache or from the Database if not available in Cache. - * Returns a promise that resolves either to the - * retrieved object of null in case it is not found. - * - * If the object is not found in Cache and found in the Database, - * it will be written to cache also. - * - * The opts object currently accepts one optional parameter: key, which forces - * the cache key to fetch. - * In case the key is not provided, it defaults to /id - * (e.g. 
users/12) - * - * @param id - The id of the object to get - * @param options.key - Optional cache key - */ - async load(id, options = {}) { - if (this.cacheDb === cacheDbEnum_1.default.NONE) { - return this.loadOneFromDatabase(id); - } - const object = await this.loadFromCache(id, options); - if (object === null) { - if (this.store === null) { - return null; - } - const objectFromDatabase = await this.loadOneFromDatabase(id); - if (objectFromDatabase !== null) { - await this.persistToCache(objectFromDatabase); - } - return objectFromDatabase; - } - await this.refreshCacheTTL(object); - return object; - } - /** - * Persists the given object in the collection that is attached to the repository. - * - * @param object - The object to persist - * @param options.method - - * @returns {Promise} - */ - persistToDatabase(object, options = {}) { - const method = options.method || "createOrReplace"; - if (method === "create") { - return this.store.create(this.collection, this.serializeToDatabase(object), { ...options, id: object._id }); - } - return this.store[method](this.collection, object._id, this.serializeToDatabase(object), options); - } - /** - * Given an object with an id, delete it from the configured storage engines - * - * @param object - The object to delete - * @param options.key - if provided, removes the given key instead of the default one (/) - */ - async delete(object, options = {}) { - const promises = []; - if (this.cacheDb !== cacheDbEnum_1.default.NONE) { - promises.push(this.deleteFromCache(object._id, options)); - } - if (this.store) { - promises.push(this.deleteFromDatabase(object._id, options)); - } - await Promise.all(promises); - } - /** - * Delete repository from database according to its id - */ - deleteFromDatabase(id, options = {}) { - return this.store.delete(this.collection, id, options); - } - /** - * Persists the given ObjectConstructor object in cache. - * - * @param object - The object to persist - * @param options.key - if provided, stores the object to the given key instead of the default one (/) - * @param options.ttl - if provided, overrides the default ttl set on the repository for the current operation - */ - async persistToCache(object, options = {}) { - const key = options.key || this.getCacheKey(object._id); - const value = JSON.stringify(this.serializeToCache(object)); - const ttl = options.ttl !== undefined ? 
options.ttl : this.ttl; - await global.kuzzle.ask(`core:cache:${this.cacheDb}:store`, key, value, { - ttl, - }); - return object; - } - /** - * Removes the object from the Cache Engine - * - * @param id - * @param options.key - if provided, stores the object to the given key instead of the default one (/) - */ - async deleteFromCache(id, options = {}) { - const key = options.key || this.getCacheKey(id); - await global.kuzzle.ask(`core:cache:${this.cacheDb}:del`, key); - } - /** - * @param object - * @param options.key - if provided, stores the object to the given key instead of the default one (/) - * @param options.ttl - if provided, overrides the default ttl set on the repository for the current operation - */ - refreshCacheTTL(object, options = {}) { - const key = options.key || this.getCacheKey(object._id); - let ttl; - if (options.ttl !== undefined) { - ttl = options.ttl; - } - else if (object.ttl !== undefined) { - // if a TTL has been defined at the entry creation, we should - // use it - ttl = object.ttl; - } - else { - ttl = this.ttl; - } - if (ttl > 0) { - return global.kuzzle.ask(`core:cache:${this.cacheDb}:expire`, key, ttl); - } - return global.kuzzle.ask(`core:cache:${this.cacheDb}:persist`, key); - } - /** - * @param object - * @param options.key - if provided, stores the object to the given key instead of the default one (/) - */ - async expireFromCache(object, options = {}) { - const key = options.key || this.getCacheKey(object._id); - await global.kuzzle.ask(`core:cache:${this.cacheDb}:expire`, key, -1); - } - /** - * Serializes the object before being persisted to cache. - * - * @param object - The object to serialize - */ - serializeToCache(object) { - return this.toDTO(object); - } - /** - * Serializes the object before being persisted to the database. - * - * @param object - The object to serialize - */ - serializeToDatabase(object) { - const dto = this.toDTO(object); - delete dto._id; - return dto; - } - /** - * @param {string} id - */ - getCacheKey(id) { - return `repos/${this.index}/${this.collection}/${id}`; - } - /** - * @param {object} dto - * @returns {Promise} - */ - async fromDTO(dto) { - const o = new this.ObjectConstructor(); - Object.assign(o, dto); - return o; - } - /** - * @param {ObjectConstructor} o - * @returns {object} - */ - toDTO(o) { - return Object.assign({}, o); - } - /** - * Recursively delete all objects in repository with a scroll - * - * @param {object} options - ES options (refresh) - * @param {object} part - * @returns {Promise} total deleted objects - */ - async truncate(options) { - // Allows safe overrides, as _truncate is called recursively - return this._truncate(options); - } - /** - * Do not override this: this function calls itself. 
- */ - async _truncate(options, part = null) { - if (part === null) { - const objects = await this.search({}, { refresh: options.refresh, scroll: "5s", size: 100 }); - const deleted = await this.truncatePart(objects, options); - if (objects.hits.length < objects.total) { - const total = await this._truncate(options, { - fetched: objects.hits.length, - scrollId: objects.scrollId, - total: objects.total, - }); - return deleted + total; - } - return deleted; - } - const objects = await this.scroll(part.scrollId, "5s"); - const deleted = await this.truncatePart(objects, options); - part.fetched += objects.hits.length; - if (part.fetched < part.total) { - part.scrollId = objects.scrollId; - const total = await this._truncate(options, part); - return deleted + total; - } - return deleted; - } - /** - * @param {Array} objects - * @param {object} options - * @returns {Promise} count of deleted objects - */ - async truncatePart(objects, options) { - const promises = []; - const processObject = async (object) => { - // profile and role repositories have protected objects, we can't delete - // them - const protectedObjects = ["profiles", "roles"].indexOf(this.collection) !== -1 - ? ["admin", "default", "anonymous"] - : []; - if (protectedObjects.indexOf(object._id) !== -1) { - return 0; - } - const loaded = await this.load(object._id); - await this.delete(loaded, options); - return 1; - }; - for (const hit of objects.hits) { - promises.push(processObject(hit)); - } - const results = await Promise.all(promises); - return results.reduce((total, deleted) => total + deleted, 0); - } - /** - * Given a raw search response from ES, returns a {total: int, hits: []} object - * @param {object} raw - * @returns {Promise} - * @private - */ - async formatSearchResults(raw) { - const result = { - aggregations: raw.aggregations, - hits: [], - scrollId: raw.scrollId, - total: raw.total, - }; - if (raw.hits && raw.hits.length > 0) { - const promises = []; - for (const hit of raw.hits) { - promises.push(this.fromDTO({ - ...hit._source, - _id: hit._id, - })); - } - result.hits = await Promise.all(promises); - } - return result; - } -} -exports.Repository = Repository; -//# sourceMappingURL=repository.js.map \ No newline at end of file diff --git a/test/.eslintrc.json b/test/.eslintrc.json index 9ef147e3f6..2971f971e7 100644 --- a/test/.eslintrc.json +++ b/test/.eslintrc.json @@ -3,6 +3,7 @@ "func-names": 0, "no-invalid-this": 0, "no-new": 0, - "new-cap": 0 + "new-cap": 0, + "sort-keys": 0 } } From b2bd1429eced6e1829cc801c400f23dc90d69e6a Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 28 Aug 2024 10:42:04 +0200 Subject: [PATCH 56/59] chore(lint): linting the app again --- lib/kuzzle/event/KuzzleEventEmitter.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/kuzzle/event/KuzzleEventEmitter.ts b/lib/kuzzle/event/KuzzleEventEmitter.ts index 6baacad459..a27e977061 100644 --- a/lib/kuzzle/event/KuzzleEventEmitter.ts +++ b/lib/kuzzle/event/KuzzleEventEmitter.ts @@ -400,7 +400,7 @@ async function pipeCallback(error: any, ...updated: any[]) { const corePipes = this.instance.corePipes.get(this.targetEvent); if (corePipes) { - await Bluebird.map(corePipes, (fn) => fn(...updated)); + await Bluebird.map(corePipes, (fn: any) => fn(...updated)); } for (const element of this.events) { From 1822be23e680f72e2d3b96fdac16d9cbae12a894 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Wed, 28 Aug 2024 09:03:16 +0000 Subject: [PATCH 57/59] chore(release): 2.32.0-elasticsearch-8.1 [skip ci] # 
[2.32.0-elasticsearch-8.1](https://github.com/kuzzleio/kuzzle/compare/v2.31.0...v2.32.0-elasticsearch-8.1) (2024-08-28) ### Bug Fixes * **ci:** indent to pass linter ([f849e2c](https://github.com/kuzzleio/kuzzle/commit/f849e2c9d2aff5a3636a0b2fc0d11ac86561277a)) * **conflicts:** merge conflict ([e763392](https://github.com/kuzzleio/kuzzle/commit/e76339261029262aac31af972dc81f05a082e469)) * **es8:** add elasticsearch-8 in listened branches ([e59cedd](https://github.com/kuzzleio/kuzzle/commit/e59cedd2a2404e315024f18eb3823af03e341411)) * **es8:** update deployment to allow elasticsearch-8 package to be deployed as a pre-release ([8286522](https://github.com/kuzzleio/kuzzle/commit/8286522368ee70efd54a6d631a5917f02d96b952)) * **kuzzleeventemitter:** add missing generics parameters ([48cbcf2](https://github.com/kuzzleio/kuzzle/commit/48cbcf2e2713e96a1e5e21ba6bf23452669edaad)) * **storage:** update more types ([50bfe58](https://github.com/kuzzleio/kuzzle/commit/50bfe580db9e86d6e1de761605ac86debdae2e48)) * **tests:** fixing unit tests ([50f2cac](https://github.com/kuzzleio/kuzzle/commit/50f2cac418f1616ec1c9c68ef149f8072a7a45e4)) * **triggerEvents:** fix response format for request with triggerEvents ([#2546](https://github.com/kuzzleio/kuzzle/issues/2546)) ([ffed901](https://github.com/kuzzleio/kuzzle/commit/ffed901d6051d6c0558019d1b67d30fdec3093ff)) ### Features * **dockerfiles:** move images from bullseye to bookworm ([#2545](https://github.com/kuzzleio/kuzzle/issues/2545)) ([c225aa1](https://github.com/kuzzleio/kuzzle/commit/c225aa183267fcdbd842e37fce1e4091780e7b1d)) * **elasticsearch8:** tests unit are now okay ([1f4d1e8](https://github.com/kuzzleio/kuzzle/commit/1f4d1e8686c3f460496f8a73d983371a173d3a14)) * **es8:** elasticsearch 8 unit tests & functional tests running ([bdcce96](https://github.com/kuzzleio/kuzzle/commit/bdcce96fc97ff58143f29484ae1da2076ee2e885)) * **es8:** support both es7 and es8 ([e12c35a](https://github.com/kuzzleio/kuzzle/commit/e12c35af6b3a8d375005177cdf56509396db5cb4)) * only support ES 8.x ([4a8038e](https://github.com/kuzzleio/kuzzle/commit/4a8038e7729a9cdf5b9d7b2c0540899f8911d11c)) * **storage:** add more types, fix some requests ([a18d454](https://github.com/kuzzleio/kuzzle/commit/a18d454b36d5fe565ad6b08a772c13c3e6a16bab)) * **storage:** upgrade to the Elasticsearch 8 client ([6753640](https://github.com/kuzzleio/kuzzle/commit/675364013e3b07fc665bfea70e2489b28bad0d8e)) --- CHANGELOG.md | 27 ++++++++++++++++++++++++++- package-lock.json | 4 ++-- package.json | 2 +- 3 files changed, 29 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 65190e7020..0e6c190d2e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,28 @@ +# [2.32.0-elasticsearch-8.1](https://github.com/kuzzleio/kuzzle/compare/v2.31.0...v2.32.0-elasticsearch-8.1) (2024-08-28) + + +### Bug Fixes + +* **ci:** indent to pass linter ([f849e2c](https://github.com/kuzzleio/kuzzle/commit/f849e2c9d2aff5a3636a0b2fc0d11ac86561277a)) +* **conflicts:** merge conflict ([e763392](https://github.com/kuzzleio/kuzzle/commit/e76339261029262aac31af972dc81f05a082e469)) +* **es8:** add elasticsearch-8 in listened branches ([e59cedd](https://github.com/kuzzleio/kuzzle/commit/e59cedd2a2404e315024f18eb3823af03e341411)) +* **es8:** update deployment to allow elasticsearch-8 package to be deployed as a pre-release ([8286522](https://github.com/kuzzleio/kuzzle/commit/8286522368ee70efd54a6d631a5917f02d96b952)) +* **kuzzleeventemitter:** add missing generics parameters 
([48cbcf2](https://github.com/kuzzleio/kuzzle/commit/48cbcf2e2713e96a1e5e21ba6bf23452669edaad)) +* **storage:** update more types ([50bfe58](https://github.com/kuzzleio/kuzzle/commit/50bfe580db9e86d6e1de761605ac86debdae2e48)) +* **tests:** fixing unit tests ([50f2cac](https://github.com/kuzzleio/kuzzle/commit/50f2cac418f1616ec1c9c68ef149f8072a7a45e4)) +* **triggerEvents:** fix response format for request with triggerEvents ([#2546](https://github.com/kuzzleio/kuzzle/issues/2546)) ([ffed901](https://github.com/kuzzleio/kuzzle/commit/ffed901d6051d6c0558019d1b67d30fdec3093ff)) + + +### Features + +* **dockerfiles:** move images from bullseye to bookworm ([#2545](https://github.com/kuzzleio/kuzzle/issues/2545)) ([c225aa1](https://github.com/kuzzleio/kuzzle/commit/c225aa183267fcdbd842e37fce1e4091780e7b1d)) +* **elasticsearch8:** tests unit are now okay ([1f4d1e8](https://github.com/kuzzleio/kuzzle/commit/1f4d1e8686c3f460496f8a73d983371a173d3a14)) +* **es8:** elasticsearch 8 unit tests & functional tests running ([bdcce96](https://github.com/kuzzleio/kuzzle/commit/bdcce96fc97ff58143f29484ae1da2076ee2e885)) +* **es8:** support both es7 and es8 ([e12c35a](https://github.com/kuzzleio/kuzzle/commit/e12c35af6b3a8d375005177cdf56509396db5cb4)) +* only support ES 8.x ([4a8038e](https://github.com/kuzzleio/kuzzle/commit/4a8038e7729a9cdf5b9d7b2c0540899f8911d11c)) +* **storage:** add more types, fix some requests ([a18d454](https://github.com/kuzzleio/kuzzle/commit/a18d454b36d5fe565ad6b08a772c13c3e6a16bab)) +* **storage:** upgrade to the Elasticsearch 8 client ([6753640](https://github.com/kuzzleio/kuzzle/commit/675364013e3b07fc665bfea70e2489b28bad0d8e)) + # [2.31.0-elasticsearch-8.2](https://github.com/kuzzleio/kuzzle/compare/v2.31.0-elasticsearch-8.1...v2.31.0-elasticsearch-8.2) (2024-06-24) @@ -134,4 +159,4 @@ ### Features -* **semantic-release:** add semantic release ([dba84a4](https://github.com/kuzzleio/kuzzle/commit/dba84a4788bcf0ff20000002891f859f4b8a420e)) \ No newline at end of file +* **semantic-release:** add semantic release ([dba84a4](https://github.com/kuzzleio/kuzzle/commit/dba84a4788bcf0ff20000002891f859f4b8a420e)) diff --git a/package-lock.json b/package-lock.json index 49a125ccb6..a115f7a31e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "kuzzle", - "version": "2.31.0", + "version": "2.32.0-elasticsearch-8.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "kuzzle", - "version": "2.31.0", + "version": "2.32.0-elasticsearch-8.1", "license": "Apache-2.0", "dependencies": { "aedes": "0.46.3", diff --git a/package.json b/package.json index 41f8136c47..983aec83c3 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "kuzzle", "author": "The Kuzzle Team ", - "version": "2.31.0", + "version": "2.32.0-elasticsearch-8.1", "description": "Kuzzle is an open-source solution that handles all the data management through a secured API, with a large choice of protocols.", "bin": "bin/start-kuzzle-server", "scripts": { From 3403a28f386a69184fd3cc1bbdcfd3990cb254ee Mon Sep 17 00:00:00 2001 From: rolljee Date: Wed, 28 Aug 2024 15:49:01 +0200 Subject: [PATCH 58/59] refactor(typescript): move queryTranslator to ts and export it --- .eslintignore | 1 + index.ts | 2 ++ lib/kerror/index.ts | 2 +- lib/service/storage/7/elasticsearch.ts | 2 +- lib/service/storage/8/elasticsearch.ts | 2 +- .../{queryTranslator.js => queryTranslator.ts} | 13 +++++-------- test/service/storage/queryTranslator.test.js | 4 +++- 7 files changed, 14 insertions(+), 
12 deletions(-) rename lib/service/storage/commons/{queryTranslator.js => queryTranslator.ts} (96%) diff --git a/.eslintignore b/.eslintignore index 7a28cbcd43..bc22f9a1ff 100644 --- a/.eslintignore +++ b/.eslintignore @@ -130,6 +130,7 @@ lib/config/documentEventAliases.js lib/service/storage/Elasticsearch.js lib/service/storage/7/elasticsearch.js lib/service/storage/8/elasticsearch.js +lib/service/storage/commons/queryTranslator.js lib/types/DebugModule.js lib/util/time.js lib/util/async.js diff --git a/index.ts b/index.ts index 4e2f707175..60974dd036 100644 --- a/index.ts +++ b/index.ts @@ -27,3 +27,5 @@ export * from "./lib/core/shared/store"; export * from "./lib/core/cache/cacheDbEnum"; export * from "./lib/core/storage/storeScopeEnum"; + +export * from "./lib/service/storage/commons/queryTranslator"; diff --git a/lib/kerror/index.ts b/lib/kerror/index.ts index cf6f7fc545..dba3920791 100644 --- a/lib/kerror/index.ts +++ b/lib/kerror/index.ts @@ -206,7 +206,7 @@ export function rawGetFrom( // If a stacktrace is present, we need to modify the first line because it // still contains the original error message - if (derivedError.stack && derivedError.stack.length && source.stack) { + if (derivedError?.stack?.length && source?.stack) { const stackArray = source.stack.split("\n"); stackArray.shift(); derivedError.stack = [ diff --git a/lib/service/storage/7/elasticsearch.ts b/lib/service/storage/7/elasticsearch.ts index 84b2a1c569..2c015927ae 100644 --- a/lib/service/storage/7/elasticsearch.ts +++ b/lib/service/storage/7/elasticsearch.ts @@ -40,7 +40,7 @@ import semver from "semver"; import debug from "../../../util/debug"; import ESWrapper from "./esWrapper"; -import QueryTranslator from "../commons/queryTranslator"; +import { QueryTranslator } from "../commons/queryTranslator"; import didYouMean from "../../../util/didYouMean"; import * as kerror from "../../../kerror"; import { assertIsObject } from "../../../util/requestAssertions"; diff --git a/lib/service/storage/8/elasticsearch.ts b/lib/service/storage/8/elasticsearch.ts index f75655f750..b520c03ac7 100644 --- a/lib/service/storage/8/elasticsearch.ts +++ b/lib/service/storage/8/elasticsearch.ts @@ -42,7 +42,7 @@ import semver from "semver"; import debug from "../../../util/debug"; import ESWrapper from "./esWrapper"; -import QueryTranslator from "../commons/queryTranslator"; +import { QueryTranslator } from "../commons/queryTranslator"; import didYouMean from "../../../util/didYouMean"; import * as kerror from "../../../kerror"; import { assertIsObject } from "../../../util/requestAssertions"; diff --git a/lib/service/storage/commons/queryTranslator.js b/lib/service/storage/commons/queryTranslator.ts similarity index 96% rename from lib/service/storage/commons/queryTranslator.js rename to lib/service/storage/commons/queryTranslator.ts index 0d296949ca..fa57602fb6 100644 --- a/lib/service/storage/commons/queryTranslator.js +++ b/lib/service/storage/commons/queryTranslator.ts @@ -18,12 +18,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - -"use strict"; - -const kerror = require("../../../kerror"); +import { get } from "../../../kerror"; class KeywordError extends Error { + public keyword: { name: string; type: string }; + constructor(type, name) { super( `The ${type} "${name}" of Koncorde DSL is not supported for search queries.`, @@ -54,7 +53,7 @@ function parseKoncordePath(path) { const lastBracket = path.lastIndexOf("]"); if (lastBracket < 0) { - throw kerror.get( + throw get( "services", "koncorde", "elastic_translation_error", @@ -172,7 +171,7 @@ const KONCORDE_OPERATORS_TO_ES = { }), }; -class QueryTranslator { +export class QueryTranslator { translate(filters) { const [name, value] = Object.entries(filters)[0]; @@ -215,5 +214,3 @@ class QueryTranslator { return converter(content); } } - -module.exports = QueryTranslator; diff --git a/test/service/storage/queryTranslator.test.js b/test/service/storage/queryTranslator.test.js index 1590401fcb..1108e13fd9 100644 --- a/test/service/storage/queryTranslator.test.js +++ b/test/service/storage/queryTranslator.test.js @@ -2,7 +2,9 @@ const should = require("should"); -const QueryTranslator = require("../../../lib/service/storage/commons/queryTranslator"); +const { + QueryTranslator, +} = require("../../../lib/service/storage/commons/queryTranslator"); describe("QueryTranslator", () => { const translator = new QueryTranslator(); From 8115e64b22221de552d2936cee2a0823d088e255 Mon Sep 17 00:00:00 2001 From: Kuruyia Date: Fri, 30 Aug 2024 08:29:39 +0200 Subject: [PATCH 59/59] chore: add queryTranslator.js to the gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 584b114879..e7412cac00 100644 --- a/.gitignore +++ b/.gitignore @@ -173,6 +173,7 @@ lib/model/security/token.js lib/model/security/user.js lib/service/storage/7/elasticsearch.js lib/service/storage/8/elasticsearch.js +lib/service/storage/commons/queryTranslator.js lib/service/storage/Elasticsearch.js lib/types/ClientConnection.js lib/types/config/DumpConfiguration.js
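
A minimal usage sketch in TypeScript, tied to PATCH 58 above: since `index.ts` now re-exports the translator (`export * from "./lib/service/storage/commons/queryTranslator"`), downstream code can pull the named `QueryTranslator` export straight from the package root instead of requiring the compiled `commons/queryTranslator.js` file. This is a sketch under assumptions, not part of the patch series: the `equals` clause converter is elided by the rename diff, so its mapping onto an Elasticsearch `term` query is assumed here, as is the illustrative `status` field.

import { QueryTranslator } from "kuzzle";

const translator = new QueryTranslator();

// Koncorde "equals" filter; assumed to be handled by the clause converters
// not shown in the rename diff and to translate to an Elasticsearch "term" query.
const esQuery = translator.translate({ equals: { status: "active" } });

console.log(esQuery); // assumed shape: { term: { status: "active" } }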