From 427848c3be63671d337659cca05b1e38dce1cd16 Mon Sep 17 00:00:00 2001 From: Joel Griffith Date: Tue, 24 Mar 2020 09:07:39 -0700 Subject: [PATCH] Revert "[Reporting/New Platform Migration] Use a new config service on server-side (#55882)" (#61075) This reverts commit 5755b2ac522483bd71ad0e1b31459338ff69cf93. --- .../__snapshots__/index.test.js.snap | 383 ++++++++++++++++++ x-pack/legacy/plugins/reporting/config.ts | 182 +++++++++ .../execute_job/decrypt_job_headers.test.ts | 22 +- .../common/execute_job/decrypt_job_headers.ts | 8 +- .../get_conditional_headers.test.ts | 173 ++++++-- .../execute_job/get_conditional_headers.ts | 20 +- .../execute_job/get_custom_logo.test.ts | 14 +- .../common/execute_job/get_custom_logo.ts | 11 +- .../common/execute_job/get_full_urls.test.ts | 80 ++-- .../common/execute_job/get_full_urls.ts | 22 +- .../common/layouts/create_layout.ts | 7 +- .../common/layouts/print_layout.ts | 9 +- .../lib/screenshots/get_number_of_items.ts | 7 +- .../common/lib/screenshots/observable.test.ts | 19 +- .../common/lib/screenshots/observable.ts | 18 +- .../common/lib/screenshots/open_url.ts | 11 +- .../common/lib/screenshots/types.ts | 2 +- .../common/lib/screenshots/wait_for_render.ts | 4 +- .../screenshots/wait_for_visualizations.ts | 7 +- .../export_types/csv/server/create_job.ts | 6 +- .../csv/server/execute_job.test.js | 344 +++++++++++++--- .../export_types/csv/server/execute_job.ts | 30 +- .../csv/server/lib/hit_iterator.test.ts | 3 +- .../csv/server/lib/hit_iterator.ts | 5 +- .../reporting/export_types/csv/types.d.ts | 5 +- .../server/create_job/create_job.ts | 19 +- .../server/execute_job.ts | 23 +- .../server/lib/generate_csv.ts | 16 +- .../server/lib/generate_csv_search.ts | 20 +- .../csv_from_savedobject/types.d.ts | 5 +- .../png/server/create_job/index.ts | 6 +- .../png/server/execute_job/index.test.js | 94 +++-- .../png/server/execute_job/index.ts | 23 +- .../png/server/lib/generate_png.ts | 7 +- .../printable_pdf/server/create_job/index.ts | 6 +- .../server/execute_job/index.test.js | 80 ++-- .../printable_pdf/server/execute_job/index.ts | 25 +- .../printable_pdf/server/lib/generate_pdf.ts | 9 +- .../export_types/printable_pdf/types.d.ts | 2 +- x-pack/legacy/plugins/reporting/index.test.js | 34 ++ x-pack/legacy/plugins/reporting/index.ts | 16 +- .../plugins/reporting/log_configuration.ts | 23 +- .../browsers/chromium/driver_factory/args.ts | 7 +- .../browsers/chromium/driver_factory/index.ts | 19 +- .../server/browsers/chromium/index.ts | 5 +- .../browsers/create_browser_driver_factory.ts | 22 +- .../browsers/download/ensure_downloaded.ts | 13 +- .../server/browsers/network_policy.ts | 9 +- .../reporting/server/browsers/types.d.ts | 2 + .../plugins/reporting/server/config/config.js | 21 + .../legacy/plugins/reporting/server/core.ts | 72 +--- .../legacy/plugins/reporting/server/index.ts | 2 +- .../legacy/plugins/reporting/server/legacy.ts | 73 +--- .../reporting/server/lib/create_queue.ts | 20 +- .../server/lib/create_worker.test.ts | 39 +- .../reporting/server/lib/create_worker.ts | 24 +- .../plugins/reporting/server/lib/crypto.ts | 7 +- .../reporting/server/lib/enqueue_job.ts | 31 +- .../lib/esqueue/helpers/index_timestamp.js | 1 - .../plugins/reporting/server/lib/get_user.ts | 4 +- .../plugins/reporting/server/lib/index.ts | 9 +- .../reporting/server/lib/jobs_query.ts | 10 +- .../reporting/server/lib/once_per_server.ts | 43 ++ .../__tests__/validate_encryption_key.js | 34 ++ .../__tests__/validate_server_host.ts | 30 ++ .../reporting/server/lib/validate/index.ts 
| 13 +- .../server/lib/validate/validate_browser.ts | 4 +- .../lib/validate/validate_encryption_key.ts | 31 ++ .../validate_max_content_length.test.js | 16 +- .../validate/validate_max_content_length.ts | 14 +- .../lib/validate/validate_server_host.ts | 27 ++ .../legacy/plugins/reporting/server/plugin.ts | 24 +- .../server/routes/generate_from_jobparams.ts | 5 +- .../routes/generate_from_savedobject.ts | 5 +- .../generate_from_savedobject_immediate.ts | 18 +- .../server/routes/generation.test.ts | 11 +- .../reporting/server/routes/generation.ts | 15 +- .../plugins/reporting/server/routes/index.ts | 7 +- .../reporting/server/routes/jobs.test.js | 46 +-- .../plugins/reporting/server/routes/jobs.ts | 15 +- .../lib/authorized_user_pre_routing.test.js | 131 +++--- .../routes/lib/authorized_user_pre_routing.ts | 16 +- .../server/routes/lib/get_document_payload.ts | 31 +- .../server/routes/lib/job_response_handler.ts | 15 +- .../lib/reporting_feature_pre_routing.ts | 8 +- .../routes/lib/route_config_factories.ts | 28 +- .../plugins/reporting/server/types.d.ts | 11 +- .../server/usage/get_reporting_usage.ts | 28 +- .../usage/reporting_usage_collector.test.js | 152 ++++--- .../server/usage/reporting_usage_collector.ts | 23 +- .../create_mock_browserdriverfactory.ts | 45 +- .../create_mock_layoutinstance.ts | 8 +- .../create_mock_reportingplugin.ts | 22 +- .../test_helpers/create_mock_server.ts | 34 +- x-pack/legacy/plugins/reporting/types.d.ts | 62 ++- x-pack/plugins/reporting/config.ts | 10 + x-pack/plugins/reporting/kibana.json | 6 +- .../reporting/server/config/index.test.ts | 122 ------ .../plugins/reporting/server/config/index.ts | 85 ---- .../reporting/server/config/schema.test.ts | 103 ----- .../plugins/reporting/server/config/schema.ts | 174 -------- x-pack/plugins/reporting/server/index.ts | 14 - x-pack/plugins/reporting/server/plugin.ts | 38 -- 103 files changed, 2192 insertions(+), 1522 deletions(-) create mode 100644 x-pack/legacy/plugins/reporting/__snapshots__/index.test.js.snap create mode 100644 x-pack/legacy/plugins/reporting/config.ts create mode 100644 x-pack/legacy/plugins/reporting/index.test.js create mode 100644 x-pack/legacy/plugins/reporting/server/config/config.js create mode 100644 x-pack/legacy/plugins/reporting/server/lib/once_per_server.ts create mode 100644 x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js create mode 100644 x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts create mode 100644 x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts create mode 100644 x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts create mode 100644 x-pack/plugins/reporting/config.ts delete mode 100644 x-pack/plugins/reporting/server/config/index.test.ts delete mode 100644 x-pack/plugins/reporting/server/config/index.ts delete mode 100644 x-pack/plugins/reporting/server/config/schema.test.ts delete mode 100644 x-pack/plugins/reporting/server/config/schema.ts delete mode 100644 x-pack/plugins/reporting/server/index.ts delete mode 100644 x-pack/plugins/reporting/server/plugin.ts diff --git a/x-pack/legacy/plugins/reporting/__snapshots__/index.test.js.snap b/x-pack/legacy/plugins/reporting/__snapshots__/index.test.js.snap new file mode 100644 index 0000000000000..757677f1d4f82 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/__snapshots__/index.test.js.snap @@ -0,0 +1,383 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`config schema with context 
{"dev":false,"dist":false} produces correct config 1`] = ` +Object { + "capture": Object { + "browser": Object { + "autoDownload": true, + "chromium": Object { + "disableSandbox": "", + "maxScreenshotDimension": 1950, + "proxy": Object { + "enabled": false, + }, + }, + "type": "chromium", + }, + "concurrency": 4, + "loadDelay": 3000, + "maxAttempts": 1, + "networkPolicy": Object { + "enabled": true, + "rules": Array [ + Object { + "allow": true, + "protocol": "http:", + }, + Object { + "allow": true, + "protocol": "https:", + }, + Object { + "allow": true, + "protocol": "ws:", + }, + Object { + "allow": true, + "protocol": "wss:", + }, + Object { + "allow": true, + "protocol": "data:", + }, + Object { + "allow": false, + }, + ], + }, + "settleTime": 1000, + "timeout": 20000, + "timeouts": Object { + "openUrl": 30000, + "renderComplete": 30000, + "waitForElements": 30000, + }, + "viewport": Object { + "height": 1200, + "width": 1950, + }, + "zoom": 2, + }, + "csv": Object { + "checkForFormulas": true, + "enablePanelActionDownload": true, + "maxSizeBytes": 10485760, + "scroll": Object { + "duration": "30s", + "size": 500, + }, + }, + "enabled": true, + "encryptionKey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "index": ".reporting", + "kibanaServer": Object {}, + "poll": Object { + "jobCompletionNotifier": Object { + "interval": 10000, + "intervalErrorMultiplier": 5, + }, + "jobsRefresh": Object { + "interval": 5000, + "intervalErrorMultiplier": 5, + }, + }, + "queue": Object { + "indexInterval": "week", + "pollEnabled": true, + "pollInterval": 3000, + "pollIntervalErrorMultiplier": 10, + "timeout": 120000, + }, + "roles": Object { + "allow": Array [ + "reporting_user", + ], + }, +} +`; + +exports[`config schema with context {"dev":false,"dist":true} produces correct config 1`] = ` +Object { + "capture": Object { + "browser": Object { + "autoDownload": false, + "chromium": Object { + "disableSandbox": "", + "maxScreenshotDimension": 1950, + "proxy": Object { + "enabled": false, + }, + }, + "type": "chromium", + }, + "concurrency": 4, + "loadDelay": 3000, + "maxAttempts": 3, + "networkPolicy": Object { + "enabled": true, + "rules": Array [ + Object { + "allow": true, + "protocol": "http:", + }, + Object { + "allow": true, + "protocol": "https:", + }, + Object { + "allow": true, + "protocol": "ws:", + }, + Object { + "allow": true, + "protocol": "wss:", + }, + Object { + "allow": true, + "protocol": "data:", + }, + Object { + "allow": false, + }, + ], + }, + "settleTime": 1000, + "timeout": 20000, + "timeouts": Object { + "openUrl": 30000, + "renderComplete": 30000, + "waitForElements": 30000, + }, + "viewport": Object { + "height": 1200, + "width": 1950, + }, + "zoom": 2, + }, + "csv": Object { + "checkForFormulas": true, + "enablePanelActionDownload": true, + "maxSizeBytes": 10485760, + "scroll": Object { + "duration": "30s", + "size": 500, + }, + }, + "enabled": true, + "index": ".reporting", + "kibanaServer": Object {}, + "poll": Object { + "jobCompletionNotifier": Object { + "interval": 10000, + "intervalErrorMultiplier": 5, + }, + "jobsRefresh": Object { + "interval": 5000, + "intervalErrorMultiplier": 5, + }, + }, + "queue": Object { + "indexInterval": "week", + "pollEnabled": true, + "pollInterval": 3000, + "pollIntervalErrorMultiplier": 10, + "timeout": 120000, + }, + "roles": Object { + "allow": Array [ + "reporting_user", + ], + }, +} +`; + +exports[`config schema with context {"dev":true,"dist":false} produces correct config 1`] = ` +Object { + "capture": Object { + "browser": Object { 
+ "autoDownload": true, + "chromium": Object { + "disableSandbox": "", + "maxScreenshotDimension": 1950, + "proxy": Object { + "enabled": false, + }, + }, + "type": "chromium", + }, + "concurrency": 4, + "loadDelay": 3000, + "maxAttempts": 1, + "networkPolicy": Object { + "enabled": true, + "rules": Array [ + Object { + "allow": true, + "protocol": "http:", + }, + Object { + "allow": true, + "protocol": "https:", + }, + Object { + "allow": true, + "protocol": "ws:", + }, + Object { + "allow": true, + "protocol": "wss:", + }, + Object { + "allow": true, + "protocol": "data:", + }, + Object { + "allow": false, + }, + ], + }, + "settleTime": 1000, + "timeout": 20000, + "timeouts": Object { + "openUrl": 30000, + "renderComplete": 30000, + "waitForElements": 30000, + }, + "viewport": Object { + "height": 1200, + "width": 1950, + }, + "zoom": 2, + }, + "csv": Object { + "checkForFormulas": true, + "enablePanelActionDownload": true, + "maxSizeBytes": 10485760, + "scroll": Object { + "duration": "30s", + "size": 500, + }, + }, + "enabled": true, + "encryptionKey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "index": ".reporting", + "kibanaServer": Object {}, + "poll": Object { + "jobCompletionNotifier": Object { + "interval": 10000, + "intervalErrorMultiplier": 5, + }, + "jobsRefresh": Object { + "interval": 5000, + "intervalErrorMultiplier": 5, + }, + }, + "queue": Object { + "indexInterval": "week", + "pollEnabled": true, + "pollInterval": 3000, + "pollIntervalErrorMultiplier": 10, + "timeout": 120000, + }, + "roles": Object { + "allow": Array [ + "reporting_user", + ], + }, +} +`; + +exports[`config schema with context {"dev":true,"dist":true} produces correct config 1`] = ` +Object { + "capture": Object { + "browser": Object { + "autoDownload": false, + "chromium": Object { + "disableSandbox": "", + "maxScreenshotDimension": 1950, + "proxy": Object { + "enabled": false, + }, + }, + "type": "chromium", + }, + "concurrency": 4, + "loadDelay": 3000, + "maxAttempts": 3, + "networkPolicy": Object { + "enabled": true, + "rules": Array [ + Object { + "allow": true, + "protocol": "http:", + }, + Object { + "allow": true, + "protocol": "https:", + }, + Object { + "allow": true, + "protocol": "ws:", + }, + Object { + "allow": true, + "protocol": "wss:", + }, + Object { + "allow": true, + "protocol": "data:", + }, + Object { + "allow": false, + }, + ], + }, + "settleTime": 1000, + "timeout": 20000, + "timeouts": Object { + "openUrl": 30000, + "renderComplete": 30000, + "waitForElements": 30000, + }, + "viewport": Object { + "height": 1200, + "width": 1950, + }, + "zoom": 2, + }, + "csv": Object { + "checkForFormulas": true, + "enablePanelActionDownload": true, + "maxSizeBytes": 10485760, + "scroll": Object { + "duration": "30s", + "size": 500, + }, + }, + "enabled": true, + "index": ".reporting", + "kibanaServer": Object {}, + "poll": Object { + "jobCompletionNotifier": Object { + "interval": 10000, + "intervalErrorMultiplier": 5, + }, + "jobsRefresh": Object { + "interval": 5000, + "intervalErrorMultiplier": 5, + }, + }, + "queue": Object { + "indexInterval": "week", + "pollEnabled": true, + "pollInterval": 3000, + "pollIntervalErrorMultiplier": 10, + "timeout": 120000, + }, + "roles": Object { + "allow": Array [ + "reporting_user", + ], + }, +} +`; diff --git a/x-pack/legacy/plugins/reporting/config.ts b/x-pack/legacy/plugins/reporting/config.ts new file mode 100644 index 0000000000000..211fa70301bbf --- /dev/null +++ b/x-pack/legacy/plugins/reporting/config.ts @@ -0,0 +1,182 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { BROWSER_TYPE } from './common/constants'; +// @ts-ignore untyped module +import { config as appConfig } from './server/config/config'; +import { getDefaultChromiumSandboxDisabled } from './server/browsers'; + +export async function config(Joi: any) { + return Joi.object({ + enabled: Joi.boolean().default(true), + kibanaServer: Joi.object({ + protocol: Joi.string().valid(['http', 'https']), + hostname: Joi.string().invalid('0'), + port: Joi.number().integer(), + }).default(), + queue: Joi.object({ + indexInterval: Joi.string().default('week'), + pollEnabled: Joi.boolean().default(true), + pollInterval: Joi.number() + .integer() + .default(3000), + pollIntervalErrorMultiplier: Joi.number() + .integer() + .default(10), + timeout: Joi.number() + .integer() + .default(120000), + }).default(), + capture: Joi.object({ + timeouts: Joi.object({ + openUrl: Joi.number() + .integer() + .default(30000), + waitForElements: Joi.number() + .integer() + .default(30000), + renderComplete: Joi.number() + .integer() + .default(30000), + }).default(), + networkPolicy: Joi.object({ + enabled: Joi.boolean().default(true), + rules: Joi.array() + .items( + Joi.object({ + allow: Joi.boolean().required(), + protocol: Joi.string(), + host: Joi.string(), + }) + ) + .default([ + { allow: true, protocol: 'http:' }, + { allow: true, protocol: 'https:' }, + { allow: true, protocol: 'ws:' }, + { allow: true, protocol: 'wss:' }, + { allow: true, protocol: 'data:' }, + { allow: false }, // Default action is to deny! + ]), + }).default(), + zoom: Joi.number() + .integer() + .default(2), + viewport: Joi.object({ + width: Joi.number() + .integer() + .default(1950), + height: Joi.number() + .integer() + .default(1200), + }).default(), + timeout: Joi.number() + .integer() + .default(20000), // deprecated + loadDelay: Joi.number() + .integer() + .default(3000), + settleTime: Joi.number() + .integer() + .default(1000), // deprecated + concurrency: Joi.number() + .integer() + .default(appConfig.concurrency), // deprecated + browser: Joi.object({ + type: Joi.any() + .valid(BROWSER_TYPE) + .default(BROWSER_TYPE), + autoDownload: Joi.boolean().when('$dist', { + is: true, + then: Joi.default(false), + otherwise: Joi.default(true), + }), + chromium: Joi.object({ + inspect: Joi.boolean() + .when('$dev', { + is: false, + then: Joi.valid(false), + else: Joi.default(false), + }) + .default(), + disableSandbox: Joi.boolean().default(await getDefaultChromiumSandboxDisabled()), + proxy: Joi.object({ + enabled: Joi.boolean().default(false), + server: Joi.string() + .uri({ scheme: ['http', 'https'] }) + .when('enabled', { + is: Joi.valid(false), + then: Joi.valid(null), + else: Joi.required(), + }), + bypass: Joi.array() + .items(Joi.string().regex(/^[^\s]+$/)) + .when('enabled', { + is: Joi.valid(false), + then: Joi.valid(null), + else: Joi.default([]), + }), + }).default(), + maxScreenshotDimension: Joi.number() + .integer() + .default(1950), + }).default(), + }).default(), + maxAttempts: Joi.number() + .integer() + .greater(0) + .when('$dist', { + is: true, + then: Joi.default(3), + otherwise: Joi.default(1), + }) + .default(), + }).default(), + csv: Joi.object({ + checkForFormulas: Joi.boolean().default(true), + enablePanelActionDownload: Joi.boolean().default(true), + maxSizeBytes: Joi.number() + 
.integer() + .default(1024 * 1024 * 10), // bytes in a kB * kB in a mB * 10 + scroll: Joi.object({ + duration: Joi.string() + .regex(/^[0-9]+(d|h|m|s|ms|micros|nanos)$/, { name: 'DurationString' }) + .default('30s'), + size: Joi.number() + .integer() + .default(500), + }).default(), + }).default(), + encryptionKey: Joi.when(Joi.ref('$dist'), { + is: true, + then: Joi.string(), + otherwise: Joi.string().default('a'.repeat(32)), + }), + roles: Joi.object({ + allow: Joi.array() + .items(Joi.string()) + .default(['reporting_user']), + }).default(), + index: Joi.string().default('.reporting'), + poll: Joi.object({ + jobCompletionNotifier: Joi.object({ + interval: Joi.number() + .integer() + .default(10000), + intervalErrorMultiplier: Joi.number() + .integer() + .default(5), + }).default(), + jobsRefresh: Joi.object({ + interval: Joi.number() + .integer() + .default(5000), + intervalErrorMultiplier: Joi.number() + .integer() + .default(5), + }).default(), + }).default(), + }).default(); +} diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts index 9085fb3cbc876..468caf93ec5dd 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts @@ -5,27 +5,33 @@ */ import { cryptoFactory } from '../../../server/lib/crypto'; +import { createMockServer } from '../../../test_helpers'; import { Logger } from '../../../types'; import { decryptJobHeaders } from './decrypt_job_headers'; -const encryptHeaders = async (encryptionKey: string, headers: Record) => { - const crypto = cryptoFactory(encryptionKey); +let mockServer: any; +beforeEach(() => { + mockServer = createMockServer(''); +}); + +const encryptHeaders = async (headers: Record) => { + const crypto = cryptoFactory(mockServer); return await crypto.encrypt(headers); }; describe('headers', () => { test(`fails if it can't decrypt headers`, async () => { - const getDecryptedHeaders = () => + await expect( decryptJobHeaders({ - encryptionKey: 'abcsecretsauce', job: { headers: 'Q53+9A+zf+Xe+ceR/uB/aR/Sw/8e+M+qR+WiG+8z+EY+mo+HiU/zQL+Xn', }, logger: ({ error: jest.fn(), } as unknown) as Logger, - }); - await expect(getDecryptedHeaders()).rejects.toMatchInlineSnapshot( + server: mockServer, + }) + ).rejects.toMatchInlineSnapshot( `[Error: Failed to decrypt report job data. Please ensure that xpack.reporting.encryptionKey is set and re-generate this report. 
Error: Invalid IV length]` ); }); @@ -36,15 +42,15 @@ describe('headers', () => { baz: 'quix', }; - const encryptedHeaders = await encryptHeaders('abcsecretsauce', headers); + const encryptedHeaders = await encryptHeaders(headers); const decryptedHeaders = await decryptJobHeaders({ - encryptionKey: 'abcsecretsauce', job: { title: 'cool-job-bro', type: 'csv', headers: encryptedHeaders, }, logger: {} as Logger, + server: mockServer, }); expect(decryptedHeaders).toEqual(headers); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts index 6f415d7ee5ea9..436b2c2dab1ad 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts @@ -6,7 +6,7 @@ import { i18n } from '@kbn/i18n'; import { cryptoFactory } from '../../../server/lib/crypto'; -import { CryptoFactory, Logger } from '../../../types'; +import { CryptoFactory, ServerFacade, Logger } from '../../../types'; interface HasEncryptedHeaders { headers?: string; @@ -17,15 +17,15 @@ export const decryptJobHeaders = async < JobParamsType, JobDocPayloadType extends HasEncryptedHeaders >({ - encryptionKey, + server, job, logger, }: { - encryptionKey?: string; + server: ServerFacade; job: JobDocPayloadType; logger: Logger; }): Promise> => { - const crypto: CryptoFactory = cryptoFactory(encryptionKey); + const crypto: CryptoFactory = cryptoFactory(server); try { const decryptedHeaders: Record = await crypto.decrypt(job.headers); return decryptedHeaders; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts index 09527621fa49f..eedb742ad7597 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts @@ -4,33 +4,27 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import sinon from 'sinon'; -import { createMockReportingCore } from '../../../test_helpers'; -import { ReportingConfig, ReportingCore } from '../../../server/types'; +import { createMockReportingCore, createMockServer } from '../../../test_helpers'; +import { ReportingCore } from '../../../server'; import { JobDocPayload } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getConditionalHeaders, getCustomLogo } from './index'; -let mockConfig: ReportingConfig; let mockReportingPlugin: ReportingCore; - -const getMockConfig = (mockConfigGet: sinon.SinonStub) => ({ - get: mockConfigGet, - kbnConfig: { get: mockConfigGet }, -}); - +let mockServer: any; beforeEach(async () => { mockReportingPlugin = await createMockReportingCore(); - - const mockConfigGet = sinon - .stub() - .withArgs('kibanaServer', 'hostname') - .returns('custom-hostname'); - mockConfig = getMockConfig(mockConfigGet); + mockServer = createMockServer(''); }); describe('conditions', () => { test(`uses hostname from reporting config if set`, async () => { + const settings: any = { + 'xpack.reporting.kibanaServer.hostname': 'custom-hostname', + }; + + mockServer = createMockServer({ settings }); + const permittedHeaders = { foo: 'bar', baz: 'quix', @@ -39,20 +33,121 @@ describe('conditions', () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - config: mockConfig, + server: mockServer, }); expect(conditionalHeaders.conditions.hostname).toEqual( - mockConfig.get('kibanaServer', 'hostname') + mockServer.config().get('xpack.reporting.kibanaServer.hostname') ); - expect(conditionalHeaders.conditions.port).toEqual(mockConfig.get('kibanaServer', 'port')); - expect(conditionalHeaders.conditions.protocol).toEqual( - mockConfig.get('kibanaServer', 'protocol') + }); + + test(`uses hostname from server.config if reporting config not set`, async () => { + const permittedHeaders = { + foo: 'bar', + baz: 'quix', + }; + + const conditionalHeaders = await getConditionalHeaders({ + job: {} as JobDocPayload, + filteredHeaders: permittedHeaders, + server: mockServer, + }); + + expect(conditionalHeaders.conditions.hostname).toEqual(mockServer.config().get('server.host')); + }); + + test(`uses port from reporting config if set`, async () => { + const settings = { + 'xpack.reporting.kibanaServer.port': 443, + }; + + mockServer = createMockServer({ settings }); + + const permittedHeaders = { + foo: 'bar', + baz: 'quix', + }; + + const conditionalHeaders = await getConditionalHeaders({ + job: {} as JobDocPayload, + filteredHeaders: permittedHeaders, + server: mockServer, + }); + + expect(conditionalHeaders.conditions.port).toEqual( + mockServer.config().get('xpack.reporting.kibanaServer.port') ); + }); + + test(`uses port from server if reporting config not set`, async () => { + const permittedHeaders = { + foo: 'bar', + baz: 'quix', + }; + + const conditionalHeaders = await getConditionalHeaders({ + job: {} as JobDocPayload, + filteredHeaders: permittedHeaders, + server: mockServer, + }); + + expect(conditionalHeaders.conditions.port).toEqual(mockServer.config().get('server.port')); + }); + + test(`uses basePath from server config`, async () => { + const permittedHeaders = { + foo: 'bar', + baz: 'quix', + }; + + const conditionalHeaders = await getConditionalHeaders({ + job: {} as JobDocPayload, + filteredHeaders: permittedHeaders, + server: mockServer, + }); + expect(conditionalHeaders.conditions.basePath).toEqual( - 
mockConfig.kbnConfig.get('server', 'basePath') + mockServer.config().get('server.basePath') ); }); + + test(`uses protocol from reporting config if set`, async () => { + const settings = { + 'xpack.reporting.kibanaServer.protocol': 'https', + }; + + mockServer = createMockServer({ settings }); + + const permittedHeaders = { + foo: 'bar', + baz: 'quix', + }; + + const conditionalHeaders = await getConditionalHeaders({ + job: {} as JobDocPayload, + filteredHeaders: permittedHeaders, + server: mockServer, + }); + + expect(conditionalHeaders.conditions.protocol).toEqual( + mockServer.config().get('xpack.reporting.kibanaServer.protocol') + ); + }); + + test(`uses protocol from server.info`, async () => { + const permittedHeaders = { + foo: 'bar', + baz: 'quix', + }; + + const conditionalHeaders = await getConditionalHeaders({ + job: {} as JobDocPayload, + filteredHeaders: permittedHeaders, + server: mockServer, + }); + + expect(conditionalHeaders.conditions.protocol).toEqual(mockServer.info.protocol); + }); }); test('uses basePath from job when creating saved object service', async () => { @@ -66,14 +161,14 @@ test('uses basePath from job when creating saved object service', async () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - config: mockConfig, + server: mockServer, }); const jobBasePath = '/sbp/s/marketing'; await getCustomLogo({ reporting: mockReportingPlugin, job: { basePath: jobBasePath } as JobDocPayloadPDF, conditionalHeaders, - config: mockConfig, + server: mockServer, }); const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; @@ -84,11 +179,6 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav const mockGetSavedObjectsClient = jest.fn(); mockReportingPlugin.getSavedObjectsClient = mockGetSavedObjectsClient; - const mockConfigGet = sinon.stub(); - mockConfigGet.withArgs('kibanaServer', 'hostname').returns('localhost'); - mockConfigGet.withArgs('server', 'basePath').returns('/sbp'); - mockConfig = getMockConfig(mockConfigGet); - const permittedHeaders = { foo: 'bar', baz: 'quix', @@ -96,14 +186,14 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - config: mockConfig, + server: mockServer, }); await getCustomLogo({ reporting: mockReportingPlugin, job: {} as JobDocPayloadPDF, conditionalHeaders, - config: mockConfig, + server: mockServer, }); const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; @@ -135,26 +225,19 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav describe('config formatting', () => { test(`lowercases server.host`, async () => { - const mockConfigGet = sinon - .stub() - .withArgs('server', 'host') - .returns('COOL-HOSTNAME'); - mockConfig = getMockConfig(mockConfigGet); - + mockServer = createMockServer({ settings: { 'server.host': 'COOL-HOSTNAME' } }); const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: {}, - config: mockConfig, + server: mockServer, }); expect(conditionalHeaders.conditions.hostname).toEqual('cool-hostname'); }); - test(`lowercases kibanaServer.hostname`, async () => { - const mockConfigGet = sinon - .stub() - .withArgs('kibanaServer', 'hostname') - .returns('GREAT-HOSTNAME'); - mockConfig = getMockConfig(mockConfigGet); + test(`lowercases 
xpack.reporting.kibanaServer.hostname`, async () => { + mockServer = createMockServer({ + settings: { 'xpack.reporting.kibanaServer.hostname': 'GREAT-HOSTNAME' }, + }); const conditionalHeaders = await getConditionalHeaders({ job: { title: 'cool-job-bro', @@ -166,7 +249,7 @@ describe('config formatting', () => { }, }, filteredHeaders: {}, - config: mockConfig, + server: mockServer, }); expect(conditionalHeaders.conditions.hostname).toEqual('great-hostname'); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts index bd7999d697ca9..975060a8052f0 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts @@ -3,31 +3,29 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ - -import { ReportingConfig } from '../../../server/types'; -import { ConditionalHeaders } from '../../../types'; +import { ConditionalHeaders, ServerFacade } from '../../../types'; export const getConditionalHeaders = ({ - config, + server, job, filteredHeaders, }: { - config: ReportingConfig; + server: ServerFacade; job: JobDocPayloadType; filteredHeaders: Record; }) => { - const { kbnConfig } = config; + const config = server.config(); const [hostname, port, basePath, protocol] = [ - config.get('kibanaServer', 'hostname'), - config.get('kibanaServer', 'port'), - kbnConfig.get('server', 'basePath'), - config.get('kibanaServer', 'protocol'), + config.get('xpack.reporting.kibanaServer.hostname') || config.get('server.host'), + config.get('xpack.reporting.kibanaServer.port') || config.get('server.port'), + config.get('server.basePath'), + config.get('xpack.reporting.kibanaServer.protocol') || server.info.protocol, ] as [string, number, string, string]; const conditionalHeaders: ConditionalHeaders = { headers: filteredHeaders, conditions: { - hostname: hostname ? 
hostname.toLowerCase() : hostname, + hostname: hostname.toLowerCase(), port, basePath, protocol, diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts index 7c4c889e3e14f..fa53f474dfba7 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts @@ -5,18 +5,16 @@ */ import { ReportingCore } from '../../../server'; -import { createMockReportingCore } from '../../../test_helpers'; +import { createMockReportingCore, createMockServer } from '../../../test_helpers'; +import { ServerFacade } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getConditionalHeaders, getCustomLogo } from './index'; -const mockConfigGet = jest.fn().mockImplementation((key: string) => { - return 'localhost'; -}); -const mockConfig = { get: mockConfigGet, kbnConfig: { get: mockConfigGet } }; - let mockReportingPlugin: ReportingCore; +let mockServer: ServerFacade; beforeEach(async () => { mockReportingPlugin = await createMockReportingCore(); + mockServer = createMockServer(''); }); test(`gets logo from uiSettings`, async () => { @@ -39,14 +37,14 @@ test(`gets logo from uiSettings`, async () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayloadPDF, filteredHeaders: permittedHeaders, - config: mockConfig, + server: mockServer, }); const { logo } = await getCustomLogo({ reporting: mockReportingPlugin, - config: mockConfig, job: {} as JobDocPayloadPDF, conditionalHeaders, + server: mockServer, }); expect(mockGet).toBeCalledWith('xpackReporting:customPdfLogo'); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts index a13f992e7867c..7af5edab41ab7 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts @@ -5,22 +5,23 @@ */ import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../common/constants'; -import { ReportingConfig, ReportingCore } from '../../../server/types'; -import { ConditionalHeaders } from '../../../types'; +import { ReportingCore } from '../../../server'; +import { ConditionalHeaders, ServerFacade } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; // Logo is PDF only export const getCustomLogo = async ({ reporting, - config, + server, job, conditionalHeaders, }: { reporting: ReportingCore; - config: ReportingConfig; + server: ServerFacade; job: JobDocPayloadPDF; conditionalHeaders: ConditionalHeaders; }) => { - const serverBasePath: string = config.kbnConfig.get('server', 'basePath'); + const serverBasePath: string = server.config().get('server.basePath'); + const fakeRequest: any = { headers: conditionalHeaders.headers, // This is used by the spaces SavedObjectClientWrapper to determine the existing space. 
diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts index 5f55617724ff6..27e772195f726 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts @@ -4,41 +4,29 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ReportingConfig } from '../../../server'; +import { createMockServer } from '../../../test_helpers'; +import { ServerFacade } from '../../../types'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getFullUrls } from './get_full_urls'; interface FullUrlsOpts { job: JobDocPayloadPNG & JobDocPayloadPDF; - config: ReportingConfig; + server: ServerFacade; + conditionalHeaders: any; } -let mockConfig: ReportingConfig; -const getMockConfig = (mockConfigGet: jest.Mock) => { - return { - get: mockConfigGet, - kbnConfig: { get: mockConfigGet }, - }; -}; - +let mockServer: any; beforeEach(() => { - const reportingConfig: Record = { - 'kibanaServer.hostname': 'localhost', - 'kibanaServer.port': 5601, - 'kibanaServer.protocol': 'http', - 'server.basePath': '/sbp', - }; - const mockConfigGet = jest.fn().mockImplementation((...keys: string[]) => { - return reportingConfig[keys.join('.') as string]; - }); - mockConfig = getMockConfig(mockConfigGet); + mockServer = createMockServer(''); }); -const getMockJob = (base: object) => base as JobDocPayloadPNG & JobDocPayloadPDF; - test(`fails if no URL is passed`, async () => { - const fn = () => getFullUrls({ job: getMockJob({}), config: mockConfig } as FullUrlsOpts); + const fn = () => + getFullUrls({ + job: {}, + server: mockServer, + } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"No valid URL fields found in Job Params! 
Expected \`job.relativeUrl: string\` or \`job.relativeUrls: string[]\`"` ); @@ -49,8 +37,8 @@ test(`fails if URLs are file-protocols for PNGs`, async () => { const relativeUrl = 'file://etc/passwd/#/something'; const fn = () => getFullUrls({ - job: getMockJob({ relativeUrl, forceNow }), - config: mockConfig, + job: { relativeUrl, forceNow }, + server: mockServer, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"` @@ -63,8 +51,8 @@ test(`fails if URLs are absolute for PNGs`, async () => { 'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something'; const fn = () => getFullUrls({ - job: getMockJob({ relativeUrl, forceNow }), - config: mockConfig, + job: { relativeUrl, forceNow }, + server: mockServer, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"` @@ -76,11 +64,11 @@ test(`fails if URLs are file-protocols for PDF`, async () => { const relativeUrl = 'file://etc/passwd/#/something'; const fn = () => getFullUrls({ - job: getMockJob({ + job: { relativeUrls: [relativeUrl], forceNow, - }), - config: mockConfig, + }, + server: mockServer, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"` @@ -93,11 +81,11 @@ test(`fails if URLs are absolute for PDF`, async () => { 'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something'; const fn = () => getFullUrls({ - job: getMockJob({ + job: { relativeUrls: [relativeUrl], forceNow, - }), - config: mockConfig, + }, + server: mockServer, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"` @@ -114,8 +102,8 @@ test(`fails if any URLs are absolute or file's for PDF`, async () => { const fn = () => getFullUrls({ - job: getMockJob({ relativeUrls, forceNow }), - config: mockConfig, + job: { relativeUrls, forceNow }, + server: mockServer, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something file://etc/passwd/#/something"` @@ -125,8 +113,8 @@ test(`fails if any URLs are absolute or file's for PDF`, async () => { test(`fails if URL does not route to a visualization`, async () => { const fn = () => getFullUrls({ - job: getMockJob({ relativeUrl: '/app/phoney' }), - config: mockConfig, + job: { relativeUrl: '/app/phoney' }, + server: mockServer, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"No valid hash in the URL! 
A hash is expected for the application to route to the intended visualization."` @@ -136,8 +124,8 @@ test(`fails if URL does not route to a visualization`, async () => { test(`adds forceNow to hash's query, if it exists`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: getMockJob({ relativeUrl: '/app/kibana#/something', forceNow }), - config: mockConfig, + job: { relativeUrl: '/app/kibana#/something', forceNow }, + server: mockServer, } as FullUrlsOpts); expect(urls[0]).toEqual( @@ -149,8 +137,8 @@ test(`appends forceNow to hash's query, if it exists`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: getMockJob({ relativeUrl: '/app/kibana#/something?_g=something', forceNow }), - config: mockConfig, + job: { relativeUrl: '/app/kibana#/something?_g=something', forceNow }, + server: mockServer, } as FullUrlsOpts); expect(urls[0]).toEqual( @@ -160,8 +148,8 @@ test(`appends forceNow to hash's query, if it exists`, async () => { test(`doesn't append forceNow query to url, if it doesn't exists`, async () => { const urls = await getFullUrls({ - job: getMockJob({ relativeUrl: '/app/kibana#/something' }), - config: mockConfig, + job: { relativeUrl: '/app/kibana#/something' }, + server: mockServer, } as FullUrlsOpts); expect(urls[0]).toEqual('http://localhost:5601/sbp/app/kibana#/something'); @@ -170,7 +158,7 @@ test(`doesn't append forceNow query to url, if it doesn't exists`, async () => { test(`adds forceNow to each of multiple urls`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: getMockJob({ + job: { relativeUrls: [ '/app/kibana#/something_aaa', '/app/kibana#/something_bbb', @@ -178,8 +166,8 @@ test(`adds forceNow to each of multiple urls`, async () => { '/app/kibana#/something_ddd', ], forceNow, - }), - config: mockConfig, + }, + server: mockServer, } as FullUrlsOpts); expect(urls).toEqual([ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts index c4b6f31019fdf..ca64d8632dbfe 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts @@ -12,7 +12,7 @@ import { } from 'url'; import { getAbsoluteUrlFactory } from '../../../common/get_absolute_url'; import { validateUrls } from '../../../common/validate_urls'; -import { ReportingConfig } from '../../../server/types'; +import { ServerFacade } from '../../../types'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; @@ -24,23 +24,19 @@ function isPdfJob(job: JobDocPayloadPNG | JobDocPayloadPDF): job is JobDocPayloa } export function getFullUrls({ - config, + server, job, }: { - config: ReportingConfig; + server: ServerFacade; job: JobDocPayloadPDF | JobDocPayloadPNG; }) { - const [basePath, protocol, hostname, port] = [ - config.kbnConfig.get('server', 'basePath'), - config.get('kibanaServer', 'protocol'), - config.get('kibanaServer', 'hostname'), - config.get('kibanaServer', 'port'), - ] as string[]; + const config = server.config(); + const getAbsoluteUrl = getAbsoluteUrlFactory({ - defaultBasePath: basePath, - protocol, - hostname, - port, + defaultBasePath: config.get('server.basePath'), + protocol: config.get('xpack.reporting.kibanaServer.protocol') || server.info.protocol, + hostname: 
config.get('xpack.reporting.kibanaServer.hostname') || config.get('server.host'), + port: config.get('xpack.reporting.kibanaServer.port') || config.get('server.port'), }); // PDF and PNG job params put in the url differently diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts b/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts index 07fceb603e451..0cb83352d4606 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts @@ -3,18 +3,17 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ - -import { CaptureConfig } from '../../../server/types'; +import { ServerFacade } from '../../../types'; import { LayoutTypes } from '../constants'; import { Layout, LayoutParams } from './layout'; import { PreserveLayout } from './preserve_layout'; import { PrintLayout } from './print_layout'; -export function createLayout(captureConfig: CaptureConfig, layoutParams?: LayoutParams): Layout { +export function createLayout(server: ServerFacade, layoutParams?: LayoutParams): Layout { if (layoutParams && layoutParams.id === LayoutTypes.PRESERVE_LAYOUT) { return new PreserveLayout(layoutParams.dimensions); } // this is the default because some jobs won't have anything specified - return new PrintLayout(captureConfig); + return new PrintLayout(server); } diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts b/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts index 98d8dc2983653..6007c2960057a 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts @@ -3,12 +3,11 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ - import path from 'path'; import { EvaluateFn, SerializableOrJSHandle } from 'puppeteer'; -import { HeadlessChromiumDriver } from '../../../server/browsers'; import { LevelLogger } from '../../../server/lib'; -import { ReportingConfigType } from '../../../server/core'; +import { HeadlessChromiumDriver } from '../../../server/browsers'; +import { ServerFacade } from '../../../types'; import { LayoutTypes } from '../constants'; import { getDefaultLayoutSelectors, Layout, LayoutSelectorDictionary, Size } from './layout'; import { CaptureConfig } from './types'; @@ -21,9 +20,9 @@ export class PrintLayout extends Layout { public readonly groupCount = 2; private captureConfig: CaptureConfig; - constructor(captureConfig: ReportingConfigType['capture']) { + constructor(server: ServerFacade) { super(LayoutTypes.PRINT); - this.captureConfig = captureConfig; + this.captureConfig = server.config().get('xpack.reporting.capture'); } public getCssOverridesPath() { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts index 57d025890d3e2..16eb433e8a75e 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts @@ -7,16 +7,17 @@ import { i18n } from '@kbn/i18n'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; -import { CaptureConfig } from '../../../../server/types'; +import { ServerFacade } from '../../../../types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_GETNUMBEROFITEMS, CONTEXT_READMETADATA } from './constants'; export const getNumberOfItems = async ( - captureConfig: CaptureConfig, + server: ServerFacade, browser: HeadlessBrowser, layout: LayoutInstance, logger: LevelLogger ): Promise => { + const config = server.config(); const { renderComplete: renderCompleteSelector, itemsCountAttribute } = layout.selectors; let itemsCount: number; @@ -32,7 +33,7 @@ export const getNumberOfItems = async ( // we have to use this hint to wait for all of them await browser.waitForSelector( `${renderCompleteSelector},[${itemsCountAttribute}]`, - { timeout: captureConfig.timeouts.waitForElements }, + { timeout: config.get('xpack.reporting.capture.timeouts.waitForElements') }, { context: CONTEXT_READMETADATA }, logger ); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts index 75ac3dca4ffa0..13d07bcdd6baf 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts @@ -19,9 +19,12 @@ import * as Rx from 'rxjs'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { loggingServiceMock } from '../../../../../../../../src/core/server/mocks'; import { LevelLogger } from '../../../../server/lib'; -import { createMockBrowserDriverFactory, createMockLayoutInstance } from '../../../../test_helpers'; +import { + createMockBrowserDriverFactory, + createMockLayoutInstance, + createMockServer, +} from '../../../../test_helpers'; import { ConditionalHeaders, HeadlessChromiumDriver } from '../../../../types'; -import { CaptureConfig } from 
'../../../../server/types'; import { screenshotsObservableFactory } from './observable'; import { ElementsPositionAndAttribute } from './types'; @@ -31,8 +34,8 @@ import { ElementsPositionAndAttribute } from './types'; const mockLogger = jest.fn(loggingServiceMock.create); const logger = new LevelLogger(mockLogger()); -const mockConfig = { timeouts: { openUrl: 13 } } as CaptureConfig; -const mockLayout = createMockLayoutInstance(mockConfig); +const __LEGACY = createMockServer({ settings: { 'xpack.reporting.capture': { loadDelay: 13 } } }); +const mockLayout = createMockLayoutInstance(__LEGACY); /* * Tests @@ -45,7 +48,7 @@ describe('Screenshot Observable Pipeline', () => { }); it('pipelines a single url into screenshot and timeRange', async () => { - const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); const result = await getScreenshots$({ logger, urls: ['/welcome/home/start/index.htm'], @@ -83,7 +86,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); const result = await getScreenshots$({ logger, urls: ['/welcome/home/start/index2.htm', '/welcome/home/start/index.php3?page=./home.php'], @@ -133,7 +136,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); const getScreenshot = async () => { return await getScreenshots$({ logger, @@ -194,7 +197,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); const getScreenshot = async () => { return await getScreenshots$({ logger, diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts index 53a11c18abd79..44c04c763f840 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts @@ -6,22 +6,24 @@ import * as Rx from 'rxjs'; import { catchError, concatMap, first, mergeMap, take, takeUntil, toArray } from 'rxjs/operators'; -import { CaptureConfig } from '../../../../server/types'; -import { HeadlessChromiumDriverFactory } from '../../../../types'; +import { CaptureConfig, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; import { getElementPositionAndAttributes } from './get_element_position_data'; import { getNumberOfItems } from './get_number_of_items'; import { getScreenshots } from './get_screenshots'; import { getTimeRange } from './get_time_range'; -import { injectCustomCss } from './inject_css'; import { openUrl } from './open_url'; import { ScreenSetupData, ScreenshotObservableOpts, ScreenshotResults } from './types'; import { waitForRenderComplete } from './wait_for_render'; import { waitForVisualizations } from './wait_for_visualizations'; +import { injectCustomCss } from './inject_css'; export function screenshotsObservableFactory( - captureConfig: 
CaptureConfig, + server: ServerFacade, browserDriverFactory: HeadlessChromiumDriverFactory ) { + const config = server.config(); + const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); + return function screenshotsObservable({ logger, urls, @@ -39,13 +41,13 @@ export function screenshotsObservableFactory( mergeMap(({ driver, exit$ }) => { const setup$: Rx.Observable = Rx.of(1).pipe( takeUntil(exit$), - mergeMap(() => openUrl(captureConfig, driver, url, conditionalHeaders, logger)), - mergeMap(() => getNumberOfItems(captureConfig, driver, layout, logger)), + mergeMap(() => openUrl(server, driver, url, conditionalHeaders, logger)), + mergeMap(() => getNumberOfItems(server, driver, layout, logger)), mergeMap(async itemsCount => { const viewport = layout.getViewport(itemsCount); await Promise.all([ driver.setViewport(viewport, logger), - waitForVisualizations(captureConfig, driver, itemsCount, layout, logger), + waitForVisualizations(server, driver, itemsCount, layout, logger), ]); }), mergeMap(async () => { @@ -58,7 +60,7 @@ export function screenshotsObservableFactory( await layout.positionElements(driver, logger); } - await waitForRenderComplete(captureConfig, driver, layout, logger); + await waitForRenderComplete(driver, layout, captureConfig, logger); }), mergeMap(async () => { return await Promise.all([ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts index a484dfb243563..fbae1f91a7a6a 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts @@ -5,26 +5,27 @@ */ import { i18n } from '@kbn/i18n'; -import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; +import { ConditionalHeaders, ServerFacade } from '../../../../types'; import { LevelLogger } from '../../../../server/lib'; -import { CaptureConfig } from '../../../../server/types'; -import { ConditionalHeaders } from '../../../../types'; +import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { PAGELOAD_SELECTOR } from '../../constants'; export const openUrl = async ( - captureConfig: CaptureConfig, + server: ServerFacade, browser: HeadlessBrowser, url: string, conditionalHeaders: ConditionalHeaders, logger: LevelLogger ): Promise => { + const config = server.config(); + try { await browser.open( url, { conditionalHeaders, waitForSelector: PAGELOAD_SELECTOR, - timeout: captureConfig.timeouts.openUrl, + timeout: config.get('xpack.reporting.capture.timeouts.openUrl'), }, logger ); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts index 76613c2d631d6..ab81a952f345c 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts @@ -4,8 +4,8 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { ElementPosition, ConditionalHeaders } from '../../../../types'; import { LevelLogger } from '../../../../server/lib'; -import { ConditionalHeaders, ElementPosition } from '../../../../types'; import { LayoutInstance } from '../../layouts/layout'; export interface ScreenshotObservableOpts { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts index 069896c8d9e90..2f6dc2829dfd8 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts @@ -5,16 +5,16 @@ */ import { i18n } from '@kbn/i18n'; +import { CaptureConfig } from '../../../../types'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; -import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_WAITFORRENDER } from './constants'; export const waitForRenderComplete = async ( - captureConfig: CaptureConfig, browser: HeadlessBrowser, layout: LayoutInstance, + captureConfig: CaptureConfig, logger: LevelLogger ) => { logger.debug( diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts index 7960e1552e559..93ad40026dff8 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts @@ -5,9 +5,9 @@ */ import { i18n } from '@kbn/i18n'; +import { ServerFacade } from '../../../../types'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; -import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_WAITFORELEMENTSTOBEINDOM } from './constants'; @@ -23,12 +23,13 @@ const getCompletedItemsCount = ({ renderCompleteSelector }: SelectorArgs) => { * 3. 
Wait for the render complete event to be fired once for each item */ export const waitForVisualizations = async ( - captureConfig: CaptureConfig, + server: ServerFacade, browser: HeadlessBrowser, itemsCount: number, layout: LayoutInstance, logger: LevelLogger ): Promise => { + const config = server.config(); const { renderComplete: renderCompleteSelector } = layout.selectors; logger.debug( @@ -44,7 +45,7 @@ export const waitForVisualizations = async ( fn: getCompletedItemsCount, args: [{ renderCompleteSelector }], toEqual: itemsCount, - timeout: captureConfig.timeouts.renderComplete, + timeout: config.get('xpack.reporting.capture.timeouts.renderComplete'), }, { context: CONTEXT_WAITFORELEMENTSTOBEINDOM }, logger diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts index b87403ac74f89..7ea67277015ab 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts @@ -11,14 +11,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, + ServerFacade, } from '../../../types'; import { JobParamsDiscoverCsv } from '../types'; export const createJobFactory: CreateJobFactory> = async function createJobFactoryFn(reporting: ReportingCore) { - const config = await reporting.getConfig(); - const crypto = cryptoFactory(config.get('encryptionKey')); +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { + const crypto = cryptoFactory(server); return async function createJob( jobParams: JobParamsDiscoverCsv, diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js index 7dfa705901fbe..f12916b734dbf 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js @@ -36,12 +36,11 @@ describe('CSV Execute Job', function() { let defaultElasticsearchResponse; let encryptedHeaders; - let clusterStub; - let configGetStub; - let mockReportingConfig; + let cancellationToken; let mockReportingPlugin; + let mockServer; + let clusterStub; let callAsCurrentUserStub; - let cancellationToken; const mockElasticsearch = { dataClient: { @@ -59,17 +58,7 @@ describe('CSV Execute Job', function() { beforeEach(async function() { mockReportingPlugin = await createMockReportingCore(); - - configGetStub = sinon.stub(); - configGetStub.withArgs('encryptionKey').returns(encryptionKey); - configGetStub.withArgs('csv', 'maxSizeBytes').returns(1024 * 1000); // 1mB - configGetStub.withArgs('csv', 'scroll').returns({}); - mockReportingConfig = { get: configGetStub, kbnConfig: { get: configGetStub } }; - - mockReportingPlugin.getConfig = () => Promise.resolve(mockReportingConfig); - mockReportingPlugin.getUiSettingsServiceFactory = () => Promise.resolve(mockUiSettingsClient); - mockReportingPlugin.getElasticsearchService = () => Promise.resolve(mockElasticsearch); - + mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; cancellationToken = new CancellationToken(); defaultElasticsearchResponse = { @@ -86,6 +75,7 @@ describe('CSV Execute Job', function() { .stub(clusterStub, 'callAsCurrentUser') .resolves(defaultElasticsearchResponse); + const configGetStub = sinon.stub(); mockUiSettingsClient.get.withArgs('csv:separator').returns(','); 
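An illustrative aside, not part of the patch: the test hunks above and below swap the short `configGetStub.withArgs('csv', 'maxSizeBytes')` stubs for a legacy-style `mockServer.config().get(...)` stub keyed by full `xpack.reporting.*` setting paths. A minimal sketch of that sinon pattern, assuming only the `sinon` package and using hypothetical values:

const sinon = require('sinon');

// Stub a legacy-style `server.config().get(key)` lookup, keyed by full setting path.
const configGetStub = sinon.stub();
configGetStub.withArgs('xpack.reporting.encryptionKey').returns('testsecret');
configGetStub.withArgs('xpack.reporting.csv.maxSizeBytes').returns(1024 * 1000);
configGetStub.withArgs('xpack.reporting.csv.scroll').returns({});

const mockServer = { config: () => ({ get: configGetStub }) };

// Matched keys return the stubbed value; any other key falls through to undefined.
mockServer.config().get('xpack.reporting.csv.maxSizeBytes'); // 1024000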
mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); @@ -103,11 +93,36 @@ describe('CSV Execute Job', function() { return fieldFormatsRegistry; }, }); + + mockServer = { + config: function() { + return { + get: configGetStub, + }; + }, + }; + mockServer + .config() + .get.withArgs('xpack.reporting.encryptionKey') + .returns(encryptionKey); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.maxSizeBytes') + .returns(1024 * 1000); // 1mB + mockServer + .config() + .get.withArgs('xpack.reporting.csv.scroll') + .returns({}); }); describe('basic Elasticsearch call behavior', function() { it('should decrypt encrypted headers and pass to callAsCurrentUser', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -123,7 +138,12 @@ describe('CSV Execute Job', function() { testBody: true, }; - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const job = { headers: encryptedHeaders, fields: [], @@ -150,7 +170,12 @@ describe('CSV Execute Job', function() { _scroll_id: scrollId, }); callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -164,7 +189,12 @@ describe('CSV Execute Job', function() { }); it('should not execute scroll if there are no hits from the search', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -194,7 +224,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -229,7 +264,12 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -257,7 +297,12 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -276,7 +321,10 @@ describe('CSV Execute Job', function() { 
describe('Cells with formula values', () => { it('returns `csv_contains_formulas` when cells contain formulas', async function() { - configGetStub.withArgs('csv', 'checkForFormulas').returns(true); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.checkForFormulas') + .returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], @@ -284,7 +332,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -301,7 +354,10 @@ describe('CSV Execute Job', function() { }); it('returns warnings when headings contain formulas', async function() { - configGetStub.withArgs('csv', 'checkForFormulas').returns(true); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.checkForFormulas') + .returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }], @@ -309,7 +365,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['=SUM(A1:A2)', 'two'], @@ -326,7 +387,10 @@ describe('CSV Execute Job', function() { }); it('returns no warnings when cells have no formulas', async function() { - configGetStub.withArgs('csv', 'checkForFormulas').returns(true); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.checkForFormulas') + .returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -334,7 +398,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -351,7 +420,10 @@ describe('CSV Execute Job', function() { }); it('returns no warnings when configured not to', async () => { - configGetStub.withArgs('csv', 'checkForFormulas').returns(false); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.checkForFormulas') + .returns(false); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], @@ -359,7 +431,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -379,7 +456,12 @@ describe('CSV Execute Job', function() { describe('Elasticsearch call errors', function() { it('should reject Promise if search call errors out', async function() { callAsCurrentUserStub.rejects(new Error()); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { 
headers: encryptedHeaders, fields: [], @@ -398,7 +480,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); callAsCurrentUserStub.onSecondCall().rejects(new Error()); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -419,7 +506,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -440,7 +532,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -468,7 +565,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -496,7 +598,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -532,7 +639,12 @@ describe('CSV Execute Job', function() { }); it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -547,7 +659,12 @@ describe('CSV Execute Job', function() { }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -561,7 +678,12 @@ describe('CSV Execute Job', function() { }); it('should call clearScroll if it got a scrollId', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -579,7 +701,12 @@ describe('CSV Execute Job', function() { describe('csv content', function() { it('should write column headers to output, even if there are no results', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + 
mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -591,7 +718,12 @@ describe('CSV Execute Job', function() { it('should use custom uiSettings csv:separator for header', async function() { mockUiSettingsClient.get.withArgs('csv:separator').returns(';'); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -603,7 +735,12 @@ describe('CSV Execute Job', function() { it('should escape column headers if uiSettings csv:quoteValues is true', async function() { mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -615,7 +752,12 @@ describe('CSV Execute Job', function() { it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -626,7 +768,12 @@ describe('CSV Execute Job', function() { }); it('should write column headers to output, when there are results', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ one: '1', two: '2' }], @@ -646,7 +793,12 @@ describe('CSV Execute Job', function() { }); it('should use comma separated values of non-nested fields from _source', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -667,7 +819,12 @@ describe('CSV Execute Job', function() { }); it('should concatenate the hits from multiple responses', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -695,7 +852,12 @@ describe('CSV Execute Job', function() { }); it('should use field formatters to format fields', async function() { - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -735,9 +897,17 @@ describe('CSV Execute Job', 
function() { let maxSizeReached; beforeEach(async function() { - configGetStub.withArgs('csv', 'maxSizeBytes').returns(1); - - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.maxSizeBytes') + .returns(1); + + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -765,9 +935,17 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); - - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.maxSizeBytes') + .returns(9); + + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -795,7 +973,10 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.maxSizeBytes') + .returns(9); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -804,7 +985,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -834,7 +1020,10 @@ describe('CSV Execute Job', function() { beforeEach(async function() { mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; - configGetStub.withArgs('csv', 'maxSizeBytes').returns(18); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.maxSizeBytes') + .returns(18); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -843,7 +1032,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -871,7 +1065,10 @@ describe('CSV Execute Job', function() { describe('scroll settings', function() { it('passes scroll duration to initial search call', async function() { const scrollDuration = 'test'; - configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration }); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.scroll') + .returns({ duration: scrollDuration }); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -880,7 +1077,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -897,7 +1099,10 @@ describe('CSV Execute Job', function() { it('passes scroll size to initial search call', async function() { const scrollSize = 100; - configGetStub.withArgs('csv', 'scroll').returns({ size: scrollSize }); + mockServer + .config() + 
.get.withArgs('xpack.reporting.csv.scroll') + .returns({ size: scrollSize }); callAsCurrentUserStub.onFirstCall().resolves({ hits: { @@ -906,7 +1111,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -923,7 +1133,10 @@ describe('CSV Execute Job', function() { it('passes scroll duration to subsequent scroll call', async function() { const scrollDuration = 'test'; - configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration }); + mockServer + .config() + .get.withArgs('xpack.reporting.csv.scroll') + .returns({ duration: scrollDuration }); callAsCurrentUserStub.onFirstCall().resolves({ hits: { @@ -932,7 +1145,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts index a8249e5810d3c..1579985891053 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts @@ -6,26 +6,32 @@ import { i18n } from '@kbn/i18n'; import Hapi from 'hapi'; -import { IUiSettingsClient, KibanaRequest } from '../../../../../../../src/core/server'; +import { + ElasticsearchServiceSetup, + IUiSettingsClient, + KibanaRequest, +} from '../../../../../../../src/core/server'; import { CSV_JOB_TYPE } from '../../../common/constants'; import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; import { getFieldFormats } from '../../../server/services'; -import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger } from '../../../types'; +import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types'; import { JobDocPayloadDiscoverCsv } from '../types'; import { fieldFormatMapFactory } from './lib/field_format_map'; import { createGenerateCsv } from './lib/generate_csv'; export const executeJobFactory: ExecuteJobFactory> = async function executeJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { - const [config, elasticsearch] = await Promise.all([ - reporting.getConfig(), - reporting.getElasticsearchService(), - ]); - const crypto = cryptoFactory(config.get('encryptionKey')); +>> = async function executeJobFactoryFn( + reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, + parentLogger: Logger +) { + const crypto = cryptoFactory(server); + const config = server.config(); const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']); - const serverBasePath = config.kbnConfig.get('server', 'basePath'); + const serverBasePath = config.get('server.basePath'); return async function executeJob( jobId: string, @@ -125,9 +131,9 @@ export const executeJobFactory: ExecuteJobFactory) { const response = await request; diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts index 
529c195486bc6..842330fa7c93f 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts @@ -5,8 +5,7 @@ */ import { CancellationToken } from '../../common/cancellation_token'; -import { ScrollConfig } from '../../server/types'; -import { JobDocPayload, JobParamPostPayload } from '../../types'; +import { JobDocPayload, JobParamPostPayload, ConditionalHeaders, RequestFacade } from '../../types'; interface DocValueField { field: string; @@ -107,7 +106,7 @@ export interface GenerateCsvParams { quoteValues: boolean; timezone: string | null; maxSizeBytes: number; - scroll: ScrollConfig; + scroll: { duration: string; size: number }; checkForFormulas?: boolean; }; } diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts index 15a1c3e0a9fad..17072d311b35f 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts @@ -5,11 +5,18 @@ */ import { notFound, notImplemented } from 'boom'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { get } from 'lodash'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; import { cryptoFactory } from '../../../../server/lib'; -import { CreateJobFactory, ImmediateCreateJobFn, Logger, RequestFacade } from '../../../../types'; +import { + CreateJobFactory, + ImmediateCreateJobFn, + Logger, + RequestFacade, + ServerFacade, +} from '../../../../types'; import { JobDocPayloadPanelCsv, JobParamsPanelCsv, @@ -30,9 +37,13 @@ interface VisData { export const createJobFactory: CreateJobFactory> = async function createJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { - const config = await reporting.getConfig(); - const crypto = cryptoFactory(config.get('encryptionKey')); +>> = function createJobFactoryFn( + reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, + parentLogger: Logger +) { + const crypto = cryptoFactory(server); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']); return async function createJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index debcdb47919f1..6bb3e73fcfe84 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -5,6 +5,7 @@ */ import { i18n } from '@kbn/i18n'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; @@ -14,6 +15,7 @@ import { JobDocOutput, Logger, RequestFacade, + ServerFacade, } from '../../../types'; import { CsvResultFromSearch } from '../../csv/types'; import { FakeRequest, JobDocPayloadPanelCsv, JobParamsPanelCsv, SearchPanel } from '../types'; @@ -21,11 +23,15 @@ import { createGenerateCsv } from './lib'; export const executeJobFactory: ExecuteJobFactory> = async function 
executeJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { - const config = await reporting.getConfig(); - const crypto = cryptoFactory(config.get('encryptionKey')); +>> = async function executeJobFactoryFn( + reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, + parentLogger: Logger +) { + const crypto = cryptoFactory(server); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']); - const generateCsv = await createGenerateCsv(reporting, parentLogger); + const generateCsv = createGenerateCsv(reporting, server, elasticsearch, parentLogger); return async function executeJob( jobId: string | null, @@ -51,11 +57,11 @@ export const executeJobFactory: ExecuteJobFactory; + let decryptedHeaders; const serializedEncryptedHeaders = job.headers; try { decryptedHeaders = await crypto.decrypt(serializedEncryptedHeaders); @@ -73,7 +79,10 @@ export const executeJobFactory: ExecuteJobFactory { export async function generateCsvSearch( req: RequestFacade, reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: Logger, searchPanel: SearchPanel, jobParams: JobParamsDiscoverCsv @@ -153,15 +159,11 @@ export async function generateCsvSearch( }, }; - const [elasticsearch, config] = await Promise.all([ - reporting.getElasticsearchService(), - reporting.getConfig(), - ]); - const { callAsCurrentUser } = elasticsearch.dataClient.asScoped( KibanaRequest.from(req.getRawRequest()) ); const callCluster = (...params: [string, object]) => callAsCurrentUser(...params); + const config = server.config(); const uiSettings = await getUiSettings(uiConfig); const generateCsvParams: GenerateCsvParams = { @@ -174,8 +176,8 @@ export async function generateCsvSearch( cancellationToken: new CancellationToken(), settings: { ...uiSettings, - maxSizeBytes: config.get('csv', 'maxSizeBytes'), - scroll: config.get('csv', 'scroll'), + maxSizeBytes: config.get('xpack.reporting.csv.maxSizeBytes'), + scroll: config.get('xpack.reporting.csv.scroll'), timezone, }, }; diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts index ab14d2dd8a660..6a7d5f336e238 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts @@ -4,10 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { JobDocPayload, JobParamPostPayload } from '../../types'; +import { JobParamPostPayload, JobDocPayload, ServerFacade } from '../../types'; export interface FakeRequest { - headers: Record; + headers: any; + server: ServerFacade; } export interface JobParamsPostPayloadPanelCsv extends JobParamPostPayload { diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts index 9aac612677094..a6911e1f14704 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts @@ -12,14 +12,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, + ServerFacade, } from '../../../../types'; import { JobParamsPNG } from '../../types'; export const createJobFactory: CreateJobFactory> = async function createJobFactoryFn(reporting: ReportingCore) { - const config = await reporting.getConfig(); - const crypto = cryptoFactory(config.get('encryptionKey')); +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { + const crypto = cryptoFactory(server); return async function createJob( { objectType, title, relativeUrl, browserTimezone, layout }: JobParamsPNG, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js index 267321d33809d..e2e6ba1b89096 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js @@ -5,6 +5,7 @@ */ import * as Rx from 'rxjs'; +import { memoize } from 'lodash'; import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; @@ -13,70 +14,63 @@ import { LevelLogger } from '../../../../server/lib'; jest.mock('../lib/generate_png', () => ({ generatePngObservableFactory: jest.fn() })); -let mockReporting; -let mockReportingConfig; - const cancellationToken = { on: jest.fn(), }; -const mockLoggerFactory = { - get: jest.fn().mockImplementation(() => ({ - error: jest.fn(), - debug: jest.fn(), - warn: jest.fn(), - })), -}; -const getMockLogger = () => new LevelLogger(mockLoggerFactory); - -const mockEncryptionKey = 'abcabcsecuresecret'; -const encryptHeaders = async headers => { - const crypto = cryptoFactory(mockEncryptionKey); - return await crypto.encrypt(headers); -}; +let config; +let mockServer; +let mockReporting; beforeEach(async () => { mockReporting = await createMockReportingCore(); - const kbnConfig = { + config = { + 'xpack.reporting.encryptionKey': 'testencryptionkey', 'server.basePath': '/sbp', + 'server.host': 'localhost', + 'server.port': 5601, }; - const reportingConfig = { - encryptionKey: mockEncryptionKey, - 'kibanaServer.hostname': 'localhost', - 'kibanaServer.port': 5601, - 'kibanaServer.protocol': 'http', - }; - - const mockGetConfig = jest.fn(); - mockReportingConfig = { - get: (...keys) => reportingConfig[keys.join('.')], - kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, - }; - mockGetConfig.mockImplementation(() => Promise.resolve(mockReportingConfig)); - mockReporting.getConfig = mockGetConfig; - - const mockElasticsearch = { - dataClient: { - asScoped: () => ({ callAsCurrentUser: jest.fn() }), + mockServer = { + config: memoize(() => ({ get: 
jest.fn() })), + info: { + protocol: 'http', }, }; - const mockGetElasticsearch = jest.fn(); - mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); - mockReporting.getElasticsearchService = mockGetElasticsearch; + mockServer.config().get.mockImplementation(key => { + return config[key]; + }); generatePngObservableFactory.mockReturnValue(jest.fn()); }); afterEach(() => generatePngObservableFactory.mockReset()); +const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), + }, +}; + +const getMockLogger = () => new LevelLogger(); + +const encryptHeaders = async headers => { + const crypto = cryptoFactory(mockServer); + return await crypto.encrypt(headers); +}; + test(`passes browserTimezone to generatePng`, async () => { const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = await executeJobFactory(mockReporting, getMockLogger()); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const browserTimezone = 'UTC'; await executeJob( 'pngJobId', @@ -94,7 +88,15 @@ test(`passes browserTimezone to generatePng`, async () => { }); test(`returns content_type of application/png`, async () => { - const executeJob = await executeJobFactory(mockReporting, getMockLogger()); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger(), + { + browserDriverFactory: {}, + } + ); const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); @@ -114,7 +116,15 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) })); - const executeJob = await executeJobFactory(mockReporting, getMockLogger()); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger(), + { + browserDriverFactory: {}, + } + ); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pngJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts index c53c20efec247..8670f0027af89 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts @@ -4,11 +4,18 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { ElasticsearchServiceSetup } from 'kibana/server'; import * as Rx from 'rxjs'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { PNG_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; -import { ESQueueWorkerExecuteFn, ExecuteJobFactory, JobDocOutput, Logger } from '../../../../types'; +import { + ESQueueWorkerExecuteFn, + ExecuteJobFactory, + JobDocOutput, + Logger, + ServerFacade, +} from '../../../../types'; import { decryptJobHeaders, getConditionalHeaders, @@ -22,24 +29,22 @@ type QueuedPngExecutorFactory = ExecuteJobFactory = Rx.of(1).pipe( - mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })), + mergeMap(() => decryptJobHeaders({ server, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), - map(filteredHeaders => getConditionalHeaders({ config, job, filteredHeaders })), + map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), mergeMap(conditionalHeaders => { - const urls = getFullUrls({ config, job }); + const urls = getFullUrls({ server, job }); const hashUrl = urls[0]; return generatePngObservable( jobLogger, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts index a15541d99f6fb..88e91982adc63 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts @@ -7,18 +7,17 @@ import * as Rx from 'rxjs'; import { map } from 'rxjs/operators'; import { LevelLogger } from '../../../../server/lib'; -import { CaptureConfig } from '../../../../server/types'; -import { ConditionalHeaders, HeadlessChromiumDriverFactory } from '../../../../types'; +import { ConditionalHeaders, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; import { LayoutParams } from '../../../common/layouts/layout'; import { PreserveLayout } from '../../../common/layouts/preserve_layout'; import { screenshotsObservableFactory } from '../../../common/lib/screenshots'; import { ScreenshotResults } from '../../../common/lib/screenshots/types'; export function generatePngObservableFactory( - captureConfig: CaptureConfig, + server: ServerFacade, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const screenshotsObservable = screenshotsObservableFactory(captureConfig, browserDriverFactory); + const screenshotsObservable = screenshotsObservableFactory(server, browserDriverFactory); return function generatePngObservable( logger: LevelLogger, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts index 8e1d5404a5984..656c99991e1f6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts @@ -12,14 +12,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, + ServerFacade, } from '../../../../types'; import { JobParamsPDF } from '../../types'; export const createJobFactory: CreateJobFactory> = async function createJobFactoryFn(reporting: ReportingCore) { - const config = await reporting.getConfig(); - const crypto = cryptoFactory(config.get('encryptionKey')); +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { + 
const crypto = cryptoFactory(server); return async function createJobFn( { title, relativeUrls, browserTimezone, layout, objectType }: JobParamsPDF, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js index 29769108bf4ac..484842ba18f2a 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js @@ -5,6 +5,7 @@ */ import * as Rx from 'rxjs'; +import { memoize } from 'lodash'; import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; @@ -13,65 +14,57 @@ import { LevelLogger } from '../../../../server/lib'; jest.mock('../lib/generate_pdf', () => ({ generatePdfObservableFactory: jest.fn() })); -let mockReporting; -let mockReportingConfig; - const cancellationToken = { on: jest.fn(), }; -const mockLoggerFactory = { - get: jest.fn().mockImplementation(() => ({ - error: jest.fn(), - debug: jest.fn(), - warn: jest.fn(), - })), -}; -const getMockLogger = () => new LevelLogger(mockLoggerFactory); - -const mockEncryptionKey = 'testencryptionkey'; -const encryptHeaders = async headers => { - const crypto = cryptoFactory(mockEncryptionKey); - return await crypto.encrypt(headers); -}; +let config; +let mockServer; +let mockReporting; beforeEach(async () => { mockReporting = await createMockReportingCore(); - const kbnConfig = { + config = { + 'xpack.reporting.encryptionKey': 'testencryptionkey', 'server.basePath': '/sbp', + 'server.host': 'localhost', + 'server.port': 5601, }; - const reportingConfig = { - encryptionKey: mockEncryptionKey, - 'kibanaServer.hostname': 'localhost', - 'kibanaServer.port': 5601, - 'kibanaServer.protocol': 'http', - }; - - const mockGetConfig = jest.fn(); - mockReportingConfig = { - get: (...keys) => reportingConfig[keys.join('.')], - kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, - }; - mockGetConfig.mockImplementation(() => Promise.resolve(mockReportingConfig)); - mockReporting.getConfig = mockGetConfig; - - const mockElasticsearch = { - dataClient: { - asScoped: () => ({ callAsCurrentUser: jest.fn() }), + mockServer = { + config: memoize(() => ({ get: jest.fn() })), + info: { + protocol: 'http', }, }; - const mockGetElasticsearch = jest.fn(); - mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); - mockReporting.getElasticsearchService = mockGetElasticsearch; + mockServer.config().get.mockImplementation(key => { + return config[key]; + }); generatePdfObservableFactory.mockReturnValue(jest.fn()); }); afterEach(() => generatePdfObservableFactory.mockReset()); +const getMockLogger = () => new LevelLogger(); +const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), + }, +}; + +const encryptHeaders = async headers => { + const crypto = cryptoFactory(mockServer); + return await crypto.encrypt(headers); +}; + test(`returns content_type of application/pdf`, async () => { - const executeJob = await executeJobFactory(mockReporting, getMockLogger()); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const encryptedHeaders = await encryptHeaders({}); const generatePdfObservable = generatePdfObservableFactory(); @@ -91,7 +84,12 @@ 
test(`returns content of generatePdf getBuffer base64 encoded`, async () => { const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) })); - const executeJob = await executeJobFactory(mockReporting, getMockLogger()); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pdfJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts index e614db46c5730..535c2dcd439a7 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts @@ -4,11 +4,18 @@ * you may not use this file except in compliance with the Elastic License. */ +import { ElasticsearchServiceSetup } from 'kibana/server'; import * as Rx from 'rxjs'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { PDF_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; -import { ESQueueWorkerExecuteFn, ExecuteJobFactory, JobDocOutput, Logger } from '../../../../types'; +import { + ESQueueWorkerExecuteFn, + ExecuteJobFactory, + JobDocOutput, + Logger, + ServerFacade, +} from '../../../../types'; import { decryptJobHeaders, getConditionalHeaders, @@ -23,25 +30,23 @@ type QueuedPdfExecutorFactory = ExecuteJobFactory = Rx.of(1).pipe( - mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })), + mergeMap(() => decryptJobHeaders({ server, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), - map(filteredHeaders => getConditionalHeaders({ config, job, filteredHeaders })), - mergeMap(conditionalHeaders => getCustomLogo({ reporting, config, job, conditionalHeaders })), + map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), + mergeMap(conditionalHeaders => getCustomLogo({ reporting, server, job, conditionalHeaders })), mergeMap(({ logo, conditionalHeaders }) => { - const urls = getFullUrls({ config, job }); + const urls = getFullUrls({ server, job }); const { browserTimezone, layout, title } = job; return generatePdfObservable( diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts index 7021fae983aa2..d78effaa1fc2f 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts @@ -8,8 +8,7 @@ import { groupBy } from 'lodash'; import * as Rx from 'rxjs'; import { mergeMap } from 'rxjs/operators'; import { LevelLogger } from '../../../../server/lib'; -import { ReportingConfigType } from '../../../../server/core'; -import { ConditionalHeaders, HeadlessChromiumDriverFactory } from '../../../../types'; +import { ConditionalHeaders, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; import { createLayout } from '../../../common/layouts'; import { LayoutInstance, LayoutParams } from '../../../common/layouts/layout'; import { screenshotsObservableFactory } from '../../../common/lib/screenshots'; @@ -28,10 +27,10 @@ const getTimeRange = 
(urlScreenshots: ScreenshotResults[]) => { }; export function generatePdfObservableFactory( - captureConfig: ReportingConfigType['capture'], + server: ServerFacade, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const screenshotsObservable = screenshotsObservableFactory(captureConfig, browserDriverFactory); + const screenshotsObservable = screenshotsObservableFactory(server, browserDriverFactory); return function generatePdfObservable( logger: LevelLogger, @@ -42,7 +41,7 @@ export function generatePdfObservableFactory( layoutParams: LayoutParams, logo?: string ): Rx.Observable<{ buffer: Buffer; warnings: string[] }> { - const layout = createLayout(captureConfig, layoutParams) as LayoutInstance; + const layout = createLayout(server, layoutParams) as LayoutInstance; const screenshots$ = screenshotsObservable({ logger, urls, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts index e8dd3c5207d92..0a9dcfe986ca6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts @@ -4,8 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { JobDocPayload } from '../../types'; import { LayoutInstance, LayoutParams } from '../common/layouts/layout'; +import { JobDocPayload, ServerFacade, RequestFacade } from '../../types'; // Job params: structure of incoming user request data, after being parsed from RISON export interface JobParamsPDF { diff --git a/x-pack/legacy/plugins/reporting/index.test.js b/x-pack/legacy/plugins/reporting/index.test.js new file mode 100644 index 0000000000000..0d9a717bd7d81 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/index.test.js @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+
+import { reporting } from './index';
+import { getConfigSchema } from '../../../test_utils';
+
+// The snapshot records the number of cpus available
+// to make the snapshot deterministic `os.cpus` needs to be mocked
+// but the other members on `os` must remain untouched
+jest.mock('os', () => {
+  const os = jest.requireActual('os');
+  os.cpus = () => [{}, {}, {}, {}];
+  return os;
+});
+
+// eslint-disable-next-line jest/valid-describe
+const describeWithContext = describe.each([
+  [{ dev: false, dist: false }],
+  [{ dev: true, dist: false }],
+  [{ dev: false, dist: true }],
+  [{ dev: true, dist: true }],
+]);
+
+describeWithContext('config schema with context %j', context => {
+  it('produces correct config', async () => {
+    const schema = await getConfigSchema(reporting);
+    const value = await schema.validate({}, { context });
+    value.capture.browser.chromium.disableSandbox = '';
+    await expect(value).toMatchSnapshot();
+  });
+});
diff --git a/x-pack/legacy/plugins/reporting/index.ts b/x-pack/legacy/plugins/reporting/index.ts
index fb95e2c2edc24..89e98302cddc9 100644
--- a/x-pack/legacy/plugins/reporting/index.ts
+++ b/x-pack/legacy/plugins/reporting/index.ts
@@ -8,16 +8,21 @@ import { i18n } from '@kbn/i18n';
 import { Legacy } from 'kibana';
 import { resolve } from 'path';
 import { PLUGIN_ID, UI_SETTINGS_CUSTOM_PDF_LOGO } from './common/constants';
+import { config as reportingConfig } from './config';
 import { legacyInit } from './server/legacy';
 import { ReportingPluginSpecOptions } from './types';

-const kbToBase64Length = (kb: number) => Math.floor((kb * 1024 * 8) / 6);
+const kbToBase64Length = (kb: number) => {
+  return Math.floor((kb * 1024 * 8) / 6);
+};

 export const reporting = (kibana: any) => {
   return new kibana.Plugin({
     id: PLUGIN_ID,
+    configPrefix: 'xpack.reporting',
     publicDir: resolve(__dirname, 'public'),
     require: ['kibana', 'elasticsearch', 'xpack_main'],
+    config: reportingConfig,

     uiExports: {
       uiSettingDefaults: {
@@ -44,5 +49,14 @@ export const reporting = (kibana: any) => {
     async init(server: Legacy.Server) {
       return legacyInit(server, this);
     },
+
+    deprecations({ unused }: any) {
+      return [
+        unused('capture.concurrency'),
+        unused('capture.timeout'),
+        unused('capture.settleTime'),
+        unused('kibanaApp'),
+      ];
+    },
   } as ReportingPluginSpecOptions);
 };
diff --git a/x-pack/legacy/plugins/reporting/log_configuration.ts b/x-pack/legacy/plugins/reporting/log_configuration.ts
index 7aaed2038bd52..b07475df6304f 100644
--- a/x-pack/legacy/plugins/reporting/log_configuration.ts
+++ b/x-pack/legacy/plugins/reporting/log_configuration.ts
@@ -6,23 +6,22 @@
 import getosSync, { LinuxOs } from 'getos';
 import { promisify } from 'util';
-import { BROWSER_TYPE } from './common/constants';
-import { CaptureConfig } from './server/types';
-import { Logger } from './types';
+import { ServerFacade, Logger } from './types';

 const getos = promisify(getosSync);

-export async function logConfiguration(captureConfig: CaptureConfig, logger: Logger) {
-  const {
-    browser: {
-      type: browserType,
-      chromium: { disableSandbox },
-    },
-  } = captureConfig;
+export async function logConfiguration(server: ServerFacade, logger: Logger) {
+  const config = server.config();
+  const browserType = config.get('xpack.reporting.capture.browser.type');

   logger.debug(`Browser type: ${browserType}`);
-  if (browserType === BROWSER_TYPE) {
-    logger.debug(`Chromium sandbox disabled: ${disableSandbox}`);
+
+  if (browserType === 'chromium') {
+    logger.debug(
+      `Chromium sandbox disabled: ${config.get(
'xpack.reporting.capture.browser.chromium.disableSandbox' + )}` + ); } const os = await getos(); diff --git a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts index a2f7a1f3ad0da..dc79a6b9db2c1 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts @@ -4,14 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import { CaptureConfig } from '../../../../server/types'; - -type ViewportConfig = CaptureConfig['viewport']; -type BrowserConfig = CaptureConfig['browser']['chromium']; +import { BrowserConfig } from '../../../../types'; interface LaunchArgs { userDataDir: BrowserConfig['userDataDir']; - viewport: ViewportConfig; + viewport: BrowserConfig['viewport']; disableSandbox: BrowserConfig['disableSandbox']; proxy: BrowserConfig['proxy']; } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts index cb228150efbcd..f90f2c7aee395 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts @@ -19,8 +19,7 @@ import { import * as Rx from 'rxjs'; import { InnerSubscriber } from 'rxjs/internal/InnerSubscriber'; import { ignoreElements, map, mergeMap, tap } from 'rxjs/operators'; -import { BROWSER_TYPE } from '../../../../common/constants'; -import { CaptureConfig } from '../../../../server/types'; +import { BrowserConfig, CaptureConfig } from '../../../../types'; import { LevelLogger as Logger } from '../../../lib/level_logger'; import { safeChildProcess } from '../../safe_child_process'; import { HeadlessChromiumDriver } from '../driver'; @@ -29,8 +28,7 @@ import { puppeteerLaunch } from '../puppeteer'; import { args } from './args'; type binaryPath = string; -type BrowserConfig = CaptureConfig['browser']['chromium']; -type ViewportConfig = CaptureConfig['viewport']; +type ViewportConfig = BrowserConfig['viewport']; export class HeadlessChromiumDriverFactory { private binaryPath: binaryPath; @@ -39,10 +37,15 @@ export class HeadlessChromiumDriverFactory { private userDataDir: string; private getChromiumArgs: (viewport: ViewportConfig) => string[]; - constructor(binaryPath: binaryPath, logger: Logger, captureConfig: CaptureConfig) { + constructor( + binaryPath: binaryPath, + logger: Logger, + browserConfig: BrowserConfig, + captureConfig: CaptureConfig + ) { this.binaryPath = binaryPath; + this.browserConfig = browserConfig; this.captureConfig = captureConfig; - this.browserConfig = captureConfig.browser.chromium; this.userDataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'chromium-')); this.getChromiumArgs = (viewport: ViewportConfig) => @@ -54,7 +57,7 @@ export class HeadlessChromiumDriverFactory { }); } - type = BROWSER_TYPE; + type = 'chromium'; test(logger: Logger) { const chromiumArgs = args({ @@ -150,7 +153,7 @@ export class HeadlessChromiumDriverFactory { // HeadlessChromiumDriver: object to "drive" a browser page const driver = new HeadlessChromiumDriver(page, { - inspect: !!this.browserConfig.inspect, + inspect: this.browserConfig.inspect, networkPolicy: this.captureConfig.networkPolicy, }); diff --git a/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts 
b/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts index 5f89662c94da2..d32338ae3e311 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { CaptureConfig } from '../../../server/types'; +import { BrowserConfig, CaptureConfig } from '../../../types'; import { LevelLogger } from '../../lib'; import { HeadlessChromiumDriverFactory } from './driver_factory'; @@ -13,7 +13,8 @@ export { paths } from './paths'; export async function createDriverFactory( binaryPath: string, logger: LevelLogger, + browserConfig: BrowserConfig, captureConfig: CaptureConfig ): Promise { - return new HeadlessChromiumDriverFactory(binaryPath, logger, captureConfig); + return new HeadlessChromiumDriverFactory(binaryPath, logger, browserConfig, captureConfig); } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts index af3b86919dc50..49c6222c9f276 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts @@ -4,22 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Logger } from '../../types'; -import { ReportingConfig } from '../types'; -import { HeadlessChromiumDriverFactory } from './chromium/driver_factory'; import { ensureBrowserDownloaded } from './download'; -import { chromium } from './index'; import { installBrowser } from './install'; +import { ServerFacade, CaptureConfig, Logger } from '../../types'; +import { BROWSER_TYPE } from '../../common/constants'; +import { chromium } from './index'; +import { HeadlessChromiumDriverFactory } from './chromium/driver_factory'; export async function createBrowserDriverFactory( - config: ReportingConfig, + server: ServerFacade, logger: Logger ): Promise { - const captureConfig = config.get('capture'); - const browserConfig = captureConfig.browser.chromium; - const browserAutoDownload = captureConfig.browser.autoDownload; + const config = server.config(); + + const dataDir: string = config.get('path.data'); + const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); const browserType = captureConfig.browser.type; - const dataDir = config.kbnConfig.get('path', 'data'); + const browserAutoDownload = captureConfig.browser.autoDownload; + const browserConfig = captureConfig.browser[BROWSER_TYPE]; if (browserConfig.disableSandbox) { logger.warning(`Enabling the Chromium sandbox provides an additional layer of protection.`); @@ -30,7 +32,7 @@ export async function createBrowserDriverFactory( try { const { binaryPath } = await installBrowser(logger, chromium, dataDir); - return chromium.createDriverFactory(binaryPath, logger, captureConfig); + return chromium.createDriverFactory(binaryPath, logger, browserConfig, captureConfig); } catch (error) { if (error.cause && ['EACCES', 'EEXIST'].includes(error.cause.code)) { logger.error( diff --git a/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts b/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts index 3697c4b86ce3c..73186966e3d2f 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts +++ 
b/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts @@ -4,15 +4,16 @@ * you may not use this file except in compliance with the Elastic License. */ -import { existsSync } from 'fs'; import { resolve as resolvePath } from 'path'; -import { BROWSER_TYPE } from '../../../common/constants'; +import { existsSync } from 'fs'; + import { chromium } from '../index'; -import { BrowserDownload } from '../types'; +import { BrowserDownload, BrowserType } from '../types'; + import { md5 } from './checksum'; -import { clean } from './clean'; -import { download } from './download'; import { asyncMap } from './util'; +import { download } from './download'; +import { clean } from './clean'; /** * Check for the downloaded archive of each requested browser type and @@ -20,7 +21,7 @@ import { asyncMap } from './util'; * @param {String} browserType * @return {Promise} */ -export async function ensureBrowserDownloaded(browserType = BROWSER_TYPE) { +export async function ensureBrowserDownloaded(browserType: BrowserType) { await ensureDownloaded([chromium]); } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts b/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts index 9714c5965a5db..b36345c08bfee 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts @@ -6,7 +6,12 @@ import * as _ from 'lodash'; import { parse } from 'url'; -import { NetworkPolicyRule } from '../../types'; + +interface FirewallRule { + allow: boolean; + host?: string; + protocol?: string; +} const isHostMatch = (actualHost: string, ruleHost: string) => { const hostParts = actualHost.split('.').reverse(); @@ -15,7 +20,7 @@ const isHostMatch = (actualHost: string, ruleHost: string) => { return _.every(ruleParts, (part, idx) => part === hostParts[idx]); }; -export const allowRequest = (url: string, rules: NetworkPolicyRule[]) => { +export const allowRequest = (url: string, rules: FirewallRule[]) => { const parsed = parse(url); if (!rules.length) { diff --git a/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts b/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts index f096073ec2f5f..0c480fc82752b 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts @@ -4,6 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ +export type BrowserType = 'chromium'; + export interface BrowserDownload { paths: { archivesPath: string; diff --git a/x-pack/legacy/plugins/reporting/server/config/config.js b/x-pack/legacy/plugins/reporting/server/config/config.js new file mode 100644 index 0000000000000..08e4db464b003 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/config/config.js @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
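For context on the network-policy hunk above: `isHostMatch` compares host names label by label from the right, so a rule's `host` behaves like a domain-suffix match. A minimal standalone TypeScript sketch of just that comparison (re-implemented for illustration; the full `allowRequest` works over `FirewallRule` objects carrying an `allow` flag and optional `protocol`, which this sketch ignores):

    // Mirrors the label-reversal comparison shown in network_policy.ts.
    const isHostMatchSketch = (actualHost: string, ruleHost: string): boolean => {
      const hostParts = actualHost.split('.').reverse(); // 'www.elastic.co' -> ['co', 'elastic', 'www']
      const ruleParts = ruleHost.split('.').reverse();   // 'elastic.co'     -> ['co', 'elastic']
      return ruleParts.every((part, idx) => part === hostParts[idx]);
    };

    isHostMatchSketch('www.elastic.co', 'elastic.co');              // true: the rule covers the subdomain
    isHostMatchSketch('elastic.co.attacker.example', 'elastic.co'); // false: labels are matched from the TLD side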
+ */ + +import { cpus } from 'os'; + +const defaultCPUCount = 2; + +function cpuCount() { + try { + return cpus().length; + } catch (e) { + return defaultCPUCount; + } +} + +export const config = { + concurrency: cpuCount(), +}; diff --git a/x-pack/legacy/plugins/reporting/server/core.ts b/x-pack/legacy/plugins/reporting/server/core.ts index c233a63833950..4506d41e4f5c3 100644 --- a/x-pack/legacy/plugins/reporting/server/core.ts +++ b/x-pack/legacy/plugins/reporting/server/core.ts @@ -7,14 +7,12 @@ import * as Rx from 'rxjs'; import { first, mapTo } from 'rxjs/operators'; import { - ElasticsearchServiceSetup, IUiSettingsClient, KibanaRequest, SavedObjectsClient, SavedObjectsServiceStart, UiSettingsServiceStart, } from 'src/core/server'; -import { ConfigType as ReportingConfigType } from '../../../../plugins/reporting/server'; // @ts-ignore no module definition import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; @@ -27,63 +25,14 @@ import { ReportingSetupDeps } from './types'; interface ReportingInternalSetup { browserDriverFactory: HeadlessChromiumDriverFactory; - config: ReportingConfig; - elasticsearch: ElasticsearchServiceSetup; } interface ReportingInternalStart { - enqueueJob: EnqueueJobFn; - esqueue: ESQueueInstance; savedObjects: SavedObjectsServiceStart; uiSettings: UiSettingsServiceStart; + esqueue: ESQueueInstance; + enqueueJob: EnqueueJobFn; } -// make config.get() aware of the value type it returns -interface Config { - get(key1: Key1): BaseType[Key1]; - get( - key1: Key1, - key2: Key2 - ): BaseType[Key1][Key2]; - get< - Key1 extends keyof BaseType, - Key2 extends keyof BaseType[Key1], - Key3 extends keyof BaseType[Key1][Key2] - >( - key1: Key1, - key2: Key2, - key3: Key3 - ): BaseType[Key1][Key2][Key3]; - get< - Key1 extends keyof BaseType, - Key2 extends keyof BaseType[Key1], - Key3 extends keyof BaseType[Key1][Key2], - Key4 extends keyof BaseType[Key1][Key2][Key3] - >( - key1: Key1, - key2: Key2, - key3: Key3, - key4: Key4 - ): BaseType[Key1][Key2][Key3][Key4]; -} - -interface KbnServerConfigType { - path: { data: string }; - server: { - basePath: string; - host: string; - name: string; - port: number; - protocol: string; - uuid: string; - }; -} - -export interface ReportingConfig extends Config { - kbnConfig: Config; -} - -export { ReportingConfigType }; - export class ReportingCore { private pluginSetupDeps?: ReportingInternalSetup; private pluginStartDeps?: ReportingInternalStart; @@ -96,7 +45,6 @@ export class ReportingCore { legacySetup( xpackMainPlugin: XPackMainPlugin, reporting: ReportingPluginSpecOptions, - config: ReportingConfig, __LEGACY: ServerFacade, plugins: ReportingSetupDeps ) { @@ -108,7 +56,7 @@ export class ReportingCore { xpackMainPlugin.info.feature(PLUGIN_ID).registerLicenseCheckResultsGenerator(checkLicense); }); // Reporting routes - registerRoutes(this, config, __LEGACY, plugins, this.logger); + registerRoutes(this, __LEGACY, plugins, this.logger); } public pluginSetup(reportingSetupDeps: ReportingInternalSetup) { @@ -142,31 +90,23 @@ export class ReportingCore { return (await this.getPluginSetupDeps()).browserDriverFactory; } - public async getConfig(): Promise { - return (await this.getPluginSetupDeps()).config; - } - /* - * Outside dependencies + * Kibana core module dependencies */ - private async getPluginSetupDeps(): Promise { + private async getPluginSetupDeps() { if (this.pluginSetupDeps) { return this.pluginSetupDeps; } return await 
this.pluginSetup$.pipe(first()).toPromise(); } - private async getPluginStartDeps(): Promise { + private async getPluginStartDeps() { if (this.pluginStartDeps) { return this.pluginStartDeps; } return await this.pluginStart$.pipe(first()).toPromise(); } - public async getElasticsearchService(): Promise { - return (await this.getPluginSetupDeps()).elasticsearch; - } - public async getSavedObjectsClient(fakeRequest: KibanaRequest): Promise { const { savedObjects } = await this.getPluginStartDeps(); return savedObjects.getScopedClient(fakeRequest) as SavedObjectsClient; diff --git a/x-pack/legacy/plugins/reporting/server/index.ts b/x-pack/legacy/plugins/reporting/server/index.ts index efcfd6b7f783d..24e2a954415d9 100644 --- a/x-pack/legacy/plugins/reporting/server/index.ts +++ b/x-pack/legacy/plugins/reporting/server/index.ts @@ -11,5 +11,5 @@ export const plugin = (context: PluginInitializerContext) => { return new Plugin(context); }; +export { ReportingCore } from './core'; export { ReportingPlugin } from './plugin'; -export { ReportingConfig, ReportingCore } from './core'; diff --git a/x-pack/legacy/plugins/reporting/server/legacy.ts b/x-pack/legacy/plugins/reporting/server/legacy.ts index 29e5af529767e..336ff5f4d2ee7 100644 --- a/x-pack/legacy/plugins/reporting/server/legacy.ts +++ b/x-pack/legacy/plugins/reporting/server/legacy.ts @@ -4,75 +4,35 @@ * you may not use this file except in compliance with the Elastic License. */ import { Legacy } from 'kibana'; -import { get } from 'lodash'; -import { take } from 'rxjs/operators'; -import { CoreSetup, PluginInitializerContext } from 'src/core/server'; -import { ConfigType, PluginsSetup } from '../../../../plugins/reporting/server'; +import { PluginInitializerContext } from 'src/core/server'; import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { ReportingPluginSpecOptions } from '../types'; import { plugin } from './index'; -import { LegacySetup, ReportingConfig, ReportingStartDeps } from './types'; +import { LegacySetup, ReportingStartDeps } from './types'; const buildLegacyDependencies = ( - coreSetup: CoreSetup, server: Legacy.Server, reportingPlugin: ReportingPluginSpecOptions -): LegacySetup => { - return { - route: server.route.bind(server), - plugins: { - xpack_main: server.plugins.xpack_main, - reporting: reportingPlugin, - }, - }; -}; - -const buildConfig = ( - coreSetup: CoreSetup, - server: Legacy.Server, - reportingConfig: ConfigType -): ReportingConfig => { - const config = server.config(); - const { http } = coreSetup; - const serverInfo = http.getServerInfo(); - - const kbnConfig = { - path: { - data: config.get('path.data'), // FIXME: get from the real PluginInitializerContext - }, - server: { - basePath: coreSetup.http.basePath.serverBasePath, - host: serverInfo.host, - name: serverInfo.name, - port: serverInfo.port, - uuid: coreSetup.uuid.getInstanceUuid(), - protocol: serverInfo.protocol, - }, - }; - - // spreading arguments as an array allows the return type to be known by the compiler - return { - get: (...keys: string[]) => get(reportingConfig, keys.join('.'), null), - kbnConfig: { - get: (...keys: string[]) => get(kbnConfig, keys.join('.'), null), - }, - }; -}; +): LegacySetup => ({ + config: server.config, + info: server.info, + route: server.route.bind(server), + plugins: { + elasticsearch: server.plugins.elasticsearch, + xpack_main: server.plugins.xpack_main, + reporting: reportingPlugin, + }, +}); export const legacyInit = async ( server: Legacy.Server, - reportingLegacyPlugin: 
ReportingPluginSpecOptions + reportingPlugin: ReportingPluginSpecOptions ) => { - const { core: coreSetup } = server.newPlatform.setup; - const { config$ } = (server.newPlatform.setup.plugins.reporting as PluginsSetup).__legacy; - const reportingConfig = await config$.pipe(take(1)).toPromise(); - const reporting = { config: buildConfig(coreSetup, server, reportingConfig) }; - - const __LEGACY = buildLegacyDependencies(coreSetup, server, reportingLegacyPlugin); + const coreSetup = server.newPlatform.setup.core; + const pluginInstance = plugin(server.newPlatform.coreContext as PluginInitializerContext); - const pluginInstance = plugin(server.newPlatform.coreContext as PluginInitializerContext); // NOTE: mocked-out PluginInitializerContext + const __LEGACY = buildLegacyDependencies(server, reportingPlugin); await pluginInstance.setup(coreSetup, { - reporting, elasticsearch: coreSetup.elasticsearch, security: server.newPlatform.setup.plugins.security as SecurityPluginSetup, usageCollection: server.newPlatform.setup.plugins.usageCollection, @@ -82,6 +42,7 @@ export const legacyInit = async ( // Schedule to call the "start" hook only after start dependencies are ready coreSetup.getStartServices().then(([core, plugins]) => pluginInstance.start(core, { + elasticsearch: coreSetup.elasticsearch, data: (plugins as ReportingStartDeps).data, __LEGACY, }) diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts index a05205526dd3e..d593e4625cdf4 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts @@ -4,24 +4,22 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ESQueueInstance, Logger } from '../../types'; +import { ElasticsearchServiceSetup } from 'kibana/server'; +import { ESQueueInstance, ServerFacade, QueueConfig, Logger } from '../../types'; import { ReportingCore } from '../core'; -import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed -import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; +import { createWorkerFactory } from './create_worker'; +import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed export async function createQueueFactory( reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: Logger ): Promise { - const [config, elasticsearch] = await Promise.all([ - reporting.getConfig(), - reporting.getElasticsearchService(), - ]); - - const queueConfig = config.get('queue'); - const index = config.get('index'); + const queueConfig: QueueConfig = server.config().get('xpack.reporting.queue'); + const index = server.config().get('xpack.reporting.index'); const queueOptions = { interval: queueConfig.indexInterval, @@ -35,7 +33,7 @@ export async function createQueueFactory( if (queueConfig.pollEnabled) { // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = await createWorkerFactory(reporting, config, logger); + const createWorker = createWorkerFactory(reporting, server, elasticsearch, logger); await createWorker(queue); } else { logger.info( diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts index 01a937a49873a..d4d913243e18d 
100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts @@ -4,9 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ +import { ElasticsearchServiceSetup } from 'kibana/server'; import * as sinon from 'sinon'; -import { ReportingConfig, ReportingCore } from '../../server/types'; +import { ReportingCore } from '../../server'; import { createMockReportingCore } from '../../test_helpers'; +import { ServerFacade } from '../../types'; import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; @@ -15,15 +17,21 @@ import { ClientMock } from './esqueue/__tests__/fixtures/legacy_elasticsearch'; import { ExportTypesRegistry } from './export_types_registry'; const configGetStub = sinon.stub(); -configGetStub.withArgs('queue').returns({ +configGetStub.withArgs('xpack.reporting.queue').returns({ pollInterval: 3300, pollIntervalErrorMultiplier: 10, }); -configGetStub.withArgs('server', 'name').returns('test-server-123'); -configGetStub.withArgs('server', 'uuid').returns('g9ymiujthvy6v8yrh7567g6fwzgzftzfr'); +configGetStub.withArgs('server.name').returns('test-server-123'); +configGetStub.withArgs('server.uuid').returns('g9ymiujthvy6v8yrh7567g6fwzgzftzfr'); const executeJobFactoryStub = sinon.stub(); -const getMockLogger = sinon.stub(); + +const getMockServer = (): ServerFacade => { + return ({ + config: () => ({ get: configGetStub }), + } as unknown) as ServerFacade; +}; +const getMockLogger = jest.fn(); const getMockExportTypesRegistry = ( exportTypes: any[] = [{ executeJobFactory: executeJobFactoryStub }] @@ -33,23 +41,25 @@ const getMockExportTypesRegistry = ( } as ExportTypesRegistry); describe('Create Worker', () => { - let mockReporting: ReportingCore; - let mockConfig: ReportingConfig; let queue: Esqueue; let client: ClientMock; + let mockReporting: ReportingCore; beforeEach(async () => { mockReporting = await createMockReportingCore(); - mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); - mockConfig = { get: configGetStub, kbnConfig: { get: configGetStub } }; - mockReporting.getConfig = () => Promise.resolve(mockConfig); client = new ClientMock(); queue = new Esqueue('reporting-queue', { client }); executeJobFactoryStub.reset(); }); test('Creates a single Esqueue worker for Reporting', async () => { - const createWorker = await createWorkerFactory(mockReporting, mockConfig, getMockLogger()); + mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); + const createWorker = createWorkerFactory( + mockReporting, + getMockServer(), + {} as ElasticsearchServiceSetup, + getMockLogger() + ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); await createWorker(queue); @@ -81,7 +91,12 @@ Object { { executeJobFactory: executeJobFactoryStub }, ]); mockReporting.getExportTypesRegistry = () => exportTypesRegistry; - const createWorker = await createWorkerFactory(mockReporting, mockConfig, getMockLogger()); + const createWorker = createWorkerFactory( + mockReporting, + getMockServer(), + {} as ElasticsearchServiceSetup, + getMockLogger() + ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); await createWorker(queue); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts index e9d0acf29c721..3567712367608 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts +++ 
b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ +import { ElasticsearchServiceSetup } from 'kibana/server'; import { CancellationToken } from '../../common/cancellation_token'; import { PLUGIN_ID } from '../../common/constants'; -import { ReportingConfig } from '../../server/types'; import { ESQueueInstance, ESQueueWorkerExecuteFn, @@ -15,22 +15,25 @@ import { JobDocPayload, JobSource, Logger, + QueueConfig, RequestFacade, + ServerFacade, } from '../../types'; import { ReportingCore } from '../core'; // @ts-ignore untyped dependency import { events as esqueueEvents } from './esqueue'; -export async function createWorkerFactory( +export function createWorkerFactory( reporting: ReportingCore, - config: ReportingConfig, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: Logger ) { type JobDocPayloadType = JobDocPayload; - - const queueConfig = config.get('queue'); - const kibanaName = config.kbnConfig.get('server', 'name'); - const kibanaId = config.kbnConfig.get('server', 'uuid'); + const config = server.config(); + const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); + const kibanaName: string = config.get('server.name'); + const kibanaId: string = config.get('server.uuid'); // Once more document types are added, this will need to be passed in return async function createWorker(queue: ESQueueInstance) { @@ -44,7 +47,12 @@ export async function createWorkerFactory( ExportTypeDefinition >) { // TODO: the executeJobFn should be unwrapped in the register method of the export types registry - const jobExecutor = await exportType.executeJobFactory(reporting, logger); + const jobExecutor = await exportType.executeJobFactory( + reporting, + server, + elasticsearch, + logger + ); jobExecutors.set(exportType.jobType, jobExecutor); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/crypto.ts b/x-pack/legacy/plugins/reporting/server/lib/crypto.ts index 97876529ecfa7..dbc01fc947f8b 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/crypto.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/crypto.ts @@ -5,7 +5,12 @@ */ import nodeCrypto from '@elastic/node-crypto'; +import { oncePerServer } from './once_per_server'; +import { ServerFacade } from '../../types'; -export function cryptoFactory(encryptionKey: string | undefined) { +function cryptoFn(server: ServerFacade) { + const encryptionKey = server.config().get('xpack.reporting.encryptionKey'); return nodeCrypto({ encryptionKey }); } + +export const cryptoFactory = oncePerServer(cryptoFn); diff --git a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts index bc4754b02ed57..c215bdc398904 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts @@ -5,18 +5,22 @@ */ import { get } from 'lodash'; +import { ElasticsearchServiceSetup } from 'kibana/server'; +// @ts-ignore +import { events as esqueueEvents } from './esqueue'; import { - ConditionalHeaders, EnqueueJobFn, ESQueueCreateJobFn, ImmediateCreateJobFn, Job, - Logger, + ServerFacade, RequestFacade, + Logger, + CaptureConfig, + QueueConfig, + ConditionalHeaders, } from '../../types'; import { ReportingCore } from '../core'; -// @ts-ignore -import { events as esqueueEvents } from './esqueue'; interface ConfirmedJob { id: string; @@ -25,16 +29,18 @@ interface ConfirmedJob { _primary_term: number; } 
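One note on the restored `cryptoFactory` above: because it is wrapped in `oncePerServer`, every caller that passes the same server facade gets back the same crypto instance built from `xpack.reporting.encryptionKey`. A hedged usage sketch (the `serverFacade` value is assumed to exist in scope, the import path is illustrative, and `@elastic/node-crypto` is used through its promise-based `encrypt`/`decrypt` methods):

    import { cryptoFactory } from './crypto'; // path assumed for illustration

    // Both calls resolve to the same memoized instance, keyed on the server object.
    const cryptoA = cryptoFactory(serverFacade);
    const cryptoB = cryptoFactory(serverFacade); // cryptoA === cryptoB

    // Round-trip an arbitrary payload with the derived key.
    const token = await cryptoA.encrypt({ authorization: 'Basic ...' });
    const original = await cryptoB.decrypt(token);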
-export async function enqueueJobFactory( +export function enqueueJobFactory( reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger -): Promise { - const config = await reporting.getConfig(); +): EnqueueJobFn { const logger = parentLogger.clone(['queue-job']); - const captureConfig = config.get('capture'); + const config = server.config(); + const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); const browserType = captureConfig.browser.type; const maxAttempts = captureConfig.maxAttempts; - const queueConfig = config.get('queue'); + const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); return async function enqueueJob( exportTypeId: string, @@ -53,7 +59,12 @@ export async function enqueueJobFactory( } // TODO: the createJobFn should be unwrapped in the register method of the export types registry - const createJob = (await exportType.createJobFactory(reporting, logger)) as CreateJobFn; + const createJob = exportType.createJobFactory( + reporting, + server, + elasticsearch, + logger + ) as CreateJobFn; const payload = await createJob(jobParams, headers, request); const options = { diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/index_timestamp.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/index_timestamp.js index 8e4047e2f22e5..6cdbe8f968f75 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/index_timestamp.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/index_timestamp.js @@ -8,7 +8,6 @@ import moment from 'moment'; export const intervals = ['year', 'month', 'week', 'day', 'hour', 'minute']; -// TODO: remove this helper by using `schema.duration` objects in the reporting config schema export function indexTimestamp(intervalStr, separator = '-') { if (separator.match(/[a-z]/i)) throw new Error('Interval separator can not be a letter'); diff --git a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts index 5e73fe77ecb79..49d5c568c3981 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts @@ -6,10 +6,10 @@ import { Legacy } from 'kibana'; import { KibanaRequest } from '../../../../../../src/core/server'; -import { Logger } from '../../types'; +import { ServerFacade } from '../../types'; import { ReportingSetupDeps } from '../types'; -export function getUserFactory(security: ReportingSetupDeps['security'], logger: Logger) { +export function getUserFactory(server: ServerFacade, security: ReportingSetupDeps['security']) { /* * Legacy.Request because this is called from routing middleware */ diff --git a/x-pack/legacy/plugins/reporting/server/lib/index.ts b/x-pack/legacy/plugins/reporting/server/lib/index.ts index f5ccbe493a91f..0a2db749cb954 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/index.ts @@ -4,10 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -export { checkLicenseFactory } from './check_license'; -export { createQueueFactory } from './create_queue'; -export { cryptoFactory } from './crypto'; -export { enqueueJobFactory } from './enqueue_job'; export { getExportTypesRegistry } from './export_types_registry'; +export { checkLicenseFactory } from './check_license'; export { LevelLogger } from './level_logger'; +export { cryptoFactory } from './crypto'; +export { oncePerServer } from './once_per_server'; export { runValidations } from './validate'; +export { createQueueFactory } from './create_queue'; +export { enqueueJobFactory } from './enqueue_job'; diff --git a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts index 0affc111c1368..c01e6377b039e 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts @@ -9,8 +9,7 @@ import Boom from 'boom'; import { errors as elasticsearchErrors } from 'elasticsearch'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { get } from 'lodash'; -import { JobSource } from '../../types'; -import { ReportingConfig } from '../types'; +import { JobSource, ServerFacade } from '../../types'; const esErrors = elasticsearchErrors as Record; const defaultSize = 10; @@ -40,11 +39,8 @@ interface CountAggResult { count: number; } -export function jobsQueryFactory( - config: ReportingConfig, - elasticsearch: ElasticsearchServiceSetup -) { - const index = config.get('index'); +export function jobsQueryFactory(server: ServerFacade, elasticsearch: ElasticsearchServiceSetup) { + const index = server.config().get('xpack.reporting.index'); const { callAsInternalUser } = elasticsearch.adminClient; function getUsername(user: any) { diff --git a/x-pack/legacy/plugins/reporting/server/lib/once_per_server.ts b/x-pack/legacy/plugins/reporting/server/lib/once_per_server.ts new file mode 100644 index 0000000000000..ae3636079a9bb --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/lib/once_per_server.ts @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { memoize, MemoizedFunction } from 'lodash'; +import { ServerFacade } from '../../types'; + +type ServerFn = (server: ServerFacade) => any; +type Memo = ((server: ServerFacade) => any) & MemoizedFunction; + +/** + * allow this function to be called multiple times, but + * ensure that it only receives one argument, the server, + * and cache the return value so that subsequent calls get + * the exact same value. + * + * This is intended to be used by service factories like getObjectQueueFactory + * + * @param {Function} fn - the factory function + * @return {any} + */ +export function oncePerServer(fn: ServerFn) { + const memoized: Memo = memoize(function(server: ServerFacade) { + if (arguments.length !== 1) { + throw new TypeError('This function expects to be called with a single argument'); + } + + // @ts-ignore + return fn.call(this, server); + }); + + // @ts-ignore + // Type 'WeakMap' is not assignable to type 'MapCache + + // use a weak map as the cache so that: + // 1. return values are mapped to the actual server instance + // 2.
return value lifecycle matches that of the server + memoized.cache = new WeakMap(); + + return memoized; +} diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js new file mode 100644 index 0000000000000..10980f702d849 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; +import sinon from 'sinon'; +import { validateEncryptionKey } from '../validate_encryption_key'; + +describe('Reporting: Validate config', () => { + const logger = { + warning: sinon.spy(), + }; + + beforeEach(() => { + logger.warning.resetHistory(); + }); + + [undefined, null].forEach(value => { + it(`should log a warning and set xpack.reporting.encryptionKey if encryptionKey is ${value}`, () => { + const config = { + get: sinon.stub().returns(value), + set: sinon.stub(), + }; + + expect(() => validateEncryptionKey({ config: () => config }, logger)).not.to.throwError(); + + sinon.assert.calledWith(config.set, 'xpack.reporting.encryptionKey'); + sinon.assert.calledWithMatch(logger.warning, /Generating a random key/); + sinon.assert.calledWithMatch(logger.warning, /please set xpack.reporting.encryptionKey/); + }); + }); +}); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts new file mode 100644 index 0000000000000..04f998fd3e5a5 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
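The warning this test exercises exists because a generated key is lost on restart, which is what makes pending reports fail. A small sketch of producing a value to pin in kibana.yml instead, mirroring the implementation's fallback of 16 random bytes rendered as hex (usage and output handling are illustrative):

    import crypto from 'crypto';

    // Prints a line that can be copied into kibana.yml.
    const key = crypto.randomBytes(16).toString('hex'); // 32 hex characters
    console.log(`xpack.reporting.encryptionKey: "${key}"`);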
+ */ + +import expect from '@kbn/expect'; +import sinon from 'sinon'; +import { ServerFacade } from '../../../../types'; +import { validateServerHost } from '../validate_server_host'; + +const configKey = 'xpack.reporting.kibanaServer.hostname'; + +describe('Reporting: Validate server host setting', () => { + it(`should log a warning and set ${configKey} if server.host is "0"`, () => { + const getStub = sinon.stub(); + getStub.withArgs('server.host').returns('0'); + getStub.withArgs(configKey).returns(undefined); + const config = { + get: getStub, + set: sinon.stub(), + }; + + expect(() => + validateServerHost(({ config: () => config } as unknown) as ServerFacade) + ).to.throwError(); + + sinon.assert.calledWith(config.set, configKey); + }); +}); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts index 85d9f727d7fa7..0fdbd858b8e3c 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts @@ -6,22 +6,25 @@ import { i18n } from '@kbn/i18n'; import { ElasticsearchServiceSetup } from 'kibana/server'; -import { Logger } from '../../../types'; +import { Logger, ServerFacade } from '../../../types'; import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory'; -import { ReportingConfig } from '../../types'; import { validateBrowser } from './validate_browser'; +import { validateEncryptionKey } from './validate_encryption_key'; import { validateMaxContentLength } from './validate_max_content_length'; +import { validateServerHost } from './validate_server_host'; export async function runValidations( - config: ReportingConfig, + server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, browserFactory: HeadlessChromiumDriverFactory, logger: Logger ) { try { await Promise.all([ - validateBrowser(browserFactory, logger), - validateMaxContentLength(config, elasticsearch, logger), + validateBrowser(server, browserFactory, logger), + validateEncryptionKey(server, logger), + validateMaxContentLength(server, elasticsearch, logger), + validateServerHost(server), ]); logger.debug( i18n.translate('xpack.reporting.selfCheck.ok', { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts index d6512d5eb718b..89c49123e85bf 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts @@ -3,10 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ - import { Browser } from 'puppeteer'; import { BROWSER_TYPE } from '../../../common/constants'; -import { Logger } from '../../../types'; +import { ServerFacade, Logger } from '../../../types'; import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory'; /* @@ -14,6 +13,7 @@ import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_fa * to the locally running Kibana instance. 
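On the server-host check verified above: the validation rejects `server.host: "0"` and falls back to setting `xpack.reporting.kibanaServer.hostname` itself. Either of the following kibana.yml arrangements avoids that automatic override (the values are the ones named in the validation's own message, used here purely as an example):

    # kibana.yml, option 1: bind to an explicit address
    server.host: "0.0.0.0"

    # kibana.yml, option 2: keep server.host as-is and tell Reporting where to reach Kibana
    xpack.reporting.kibanaServer.hostname: "0.0.0.0"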
*/ export const validateBrowser = async ( + server: ServerFacade, browserFactory: HeadlessChromiumDriverFactory, logger: Logger ) => { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts new file mode 100644 index 0000000000000..e0af94cbdc29c --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { i18n } from '@kbn/i18n'; +import crypto from 'crypto'; +import { ServerFacade, Logger } from '../../../types'; + +export function validateEncryptionKey(serverFacade: ServerFacade, logger: Logger) { + const config = serverFacade.config(); + + const encryptionKey = config.get('xpack.reporting.encryptionKey'); + if (encryptionKey == null) { + // TODO this should simply throw an error and let the handler convert it to a warning message. See validateServerHost. + logger.warning( + i18n.translate('xpack.reporting.selfCheckEncryptionKey.warning', { + defaultMessage: + `Generating a random key for {setting}. To prevent pending reports ` + + `from failing on restart, please set {setting} in kibana.yml`, + values: { + setting: 'xpack.reporting.encryptionKey', + }, + }) + ); + + // @ts-ignore: No set() method on KibanaConfig, just get() and has() + config.set('xpack.reporting.encryptionKey', crypto.randomBytes(16).toString('hex')); // update config in memory to contain a usable encryption key + } +} diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js index 2551fd48b91f3..942dcaf842696 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js @@ -32,7 +32,11 @@ describe('Reporting: Validate Max Content Length', () => { }); it('should log warning messages when reporting has a higher max-size than elasticsearch', async () => { - const config = { get: sinon.stub().returns(FIVE_HUNDRED_MEGABYTES) }; + const server = { + config: () => ({ + get: sinon.stub().returns(FIVE_HUNDRED_MEGABYTES), + }), + }; const elasticsearch = { dataClient: { callAsInternalUser: () => ({ @@ -45,7 +49,7 @@ }, }; - await validateMaxContentLength(config, elasticsearch, logger); + await validateMaxContentLength(server, elasticsearch, logger); sinon.assert.calledWithMatch( logger.warning, @@ -66,10 +70,14 @@ }); it('should do nothing when reporting has the same max-size as elasticsearch', async () => { - const config = { get: sinon.stub().returns(ONE_HUNDRED_MEGABYTES) }; + const server = { + config: () => ({ + get: sinon.stub().returns(ONE_HUNDRED_MEGABYTES), + }), + }; expect( - async () => await validateMaxContentLength(config, elasticsearch, logger.warning) + async () => await validateMaxContentLength(server, elasticsearch, logger.warning) ).not.toThrow(); sinon.assert.notCalled(logger.warning); }); }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts
b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts index a20905ba093d4..ce4a5b93e7431 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts @@ -7,17 +7,17 @@ import numeral from '@elastic/numeral'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { defaults, get } from 'lodash'; -import { Logger } from '../../../types'; -import { ReportingConfig } from '../../types'; +import { Logger, ServerFacade } from '../../../types'; -const KIBANA_MAX_SIZE_BYTES_PATH = 'csv.maxSizeBytes'; +const KIBANA_MAX_SIZE_BYTES_PATH = 'xpack.reporting.csv.maxSizeBytes'; const ES_MAX_SIZE_BYTES_PATH = 'http.max_content_length'; export async function validateMaxContentLength( - config: ReportingConfig, + server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, logger: Logger ) { + const config = server.config(); const { callAsInternalUser } = elasticsearch.dataClient; const elasticClusterSettingsResponse = await callAsInternalUser('cluster.getSettings', { @@ -28,13 +28,13 @@ const elasticSearchMaxContent = get(elasticClusterSettings, 'http.max_content_length', '100mb'); const elasticSearchMaxContentBytes = numeral().unformat(elasticSearchMaxContent.toUpperCase()); - const kibanaMaxContentBytes = config.get('csv', 'maxSizeBytes'); + const kibanaMaxContentBytes: number = config.get(KIBANA_MAX_SIZE_BYTES_PATH); if (kibanaMaxContentBytes > elasticSearchMaxContentBytes) { // TODO this should simply throw an error and let the handler convert it to a warning message. See validateServerHost. logger.warning( - `xpack.reporting.${KIBANA_MAX_SIZE_BYTES_PATH} (${kibanaMaxContentBytes}) is higher than ElasticSearch's ${ES_MAX_SIZE_BYTES_PATH} (${elasticSearchMaxContentBytes}). ` + - `Please set ${ES_MAX_SIZE_BYTES_PATH} in ElasticSearch to match, or lower your xpack.reporting.${KIBANA_MAX_SIZE_BYTES_PATH} in Kibana to avoid this warning.` + `${KIBANA_MAX_SIZE_BYTES_PATH} (${kibanaMaxContentBytes}) is higher than ElasticSearch's ${ES_MAX_SIZE_BYTES_PATH} (${elasticSearchMaxContentBytes}). ` + + `Please set ${ES_MAX_SIZE_BYTES_PATH} in ElasticSearch to match, or lower your ${KIBANA_MAX_SIZE_BYTES_PATH} in Kibana to avoid this warning.` ); } } diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts new file mode 100644 index 0000000000000..f4f4d61246b6a --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
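The size check above compares a Kibana setting against an Elasticsearch setting, so keeping them consistent spans two config files. A hedged illustration (values are illustrative):

    # kibana.yml
    xpack.reporting.csv.maxSizeBytes: 10485760

    # elasticsearch.yml, must be at least as large as the Kibana value to avoid the warning
    http.max_content_length: 100mb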
+ */ + +import { ServerFacade } from '../../../types'; + +const configKey = 'xpack.reporting.kibanaServer.hostname'; + +export function validateServerHost(serverFacade: ServerFacade) { + const config = serverFacade.config(); + + const serverHost = config.get('server.host'); + const reportingKibanaHostName = config.get(configKey); + + if (!reportingKibanaHostName && serverHost === '0') { + // @ts-ignore: No set() method on KibanaConfig, just get() and has() + config.set(configKey, '0.0.0.0'); // update config in memory to allow Reporting to work + + throw new Error( + `Found 'server.host: "0"' in settings. This is incompatible with Reporting. ` + + `To enable Reporting to work, '${configKey}: 0.0.0.0' is being automatically added to the configuration. ` + + `You can change to 'server.host: 0.0.0.0' or add '${configKey}: 0.0.0.0' in kibana.yml to prevent this message.` + ); + } +} diff --git a/x-pack/legacy/plugins/reporting/server/plugin.ts b/x-pack/legacy/plugins/reporting/server/plugin.ts index 1d7cc075b690d..4f24cc16b2277 100644 --- a/x-pack/legacy/plugins/reporting/server/plugin.ts +++ b/x-pack/legacy/plugins/reporting/server/plugin.ts @@ -12,6 +12,8 @@ import { createQueueFactory, enqueueJobFactory, LevelLogger, runValidations } fr import { setFieldFormats } from './services'; import { ReportingSetup, ReportingSetupDeps, ReportingStart, ReportingStartDeps } from './types'; import { registerReportingUsageCollector } from './usage'; +// @ts-ignore no module definition +import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; export class ReportingPlugin implements Plugin { @@ -24,29 +26,29 @@ } public async setup(core: CoreSetup, plugins: ReportingSetupDeps) { - const { reporting: reportingNewPlatform, elasticsearch, __LEGACY } = plugins; - const { config } = reportingNewPlatform; + const { elasticsearch, usageCollection, __LEGACY } = plugins; - const browserDriverFactory = await createBrowserDriverFactory(config, this.logger); // required for validations :( - runValidations(config, elasticsearch, browserDriverFactory, this.logger); // this must run early, as it sets up config defaults + const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, this.logger); // required for validations :( + runValidations(__LEGACY, elasticsearch, browserDriverFactory, this.logger); // this must run early, as it sets up config defaults const { xpack_main: xpackMainLegacy, reporting: reportingLegacy } = __LEGACY.plugins; - this.reportingCore.legacySetup(xpackMainLegacy, reportingLegacy, config, __LEGACY, plugins); + this.reportingCore.legacySetup(xpackMainLegacy, reportingLegacy, __LEGACY, plugins); // Register a function with server to manage the collection of usage stats - registerReportingUsageCollector(this.reportingCore, config, plugins); + registerReportingUsageCollector(this.reportingCore, __LEGACY, usageCollection); // register setup internals - this.reportingCore.pluginSetup({ browserDriverFactory, config, elasticsearch }); + this.reportingCore.pluginSetup({ browserDriverFactory }); return {}; } public async start(core: CoreStart, plugins: ReportingStartDeps) { const { reportingCore, logger } = this; + const { elasticsearch, __LEGACY } = plugins; - const esqueue = await createQueueFactory(reportingCore, logger); - const enqueueJob = await enqueueJobFactory(reportingCore, logger); + const esqueue = await createQueueFactory(reportingCore, __LEGACY, elasticsearch, logger); + const enqueueJob = enqueueJobFactory(reportingCore, __LEGACY,
elasticsearch, logger); this.reportingCore.pluginStart({ savedObjects: core.savedObjects, @@ -56,9 +58,7 @@ export class ReportingPlugin }); setFieldFormats(plugins.data.fieldFormats); - - const config = await reportingCore.getConfig(); - logConfiguration(config.get('capture'), this.logger); + logConfiguration(__LEGACY, this.logger); return {}; } diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts index dc58e97ff3e41..56622617586f7 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts @@ -10,7 +10,7 @@ import { Legacy } from 'kibana'; import rison from 'rison-node'; import { API_BASE_URL } from '../../common/constants'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingConfig, ReportingSetupDeps } from '../types'; +import { ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { GetRouteConfigFactoryFn, @@ -22,7 +22,6 @@ import { HandlerErrorFunction, HandlerFunction } from './types'; const BASE_GENERATE = `${API_BASE_URL}/generate`; export function registerGenerateFromJobParams( - config: ReportingConfig, server: ServerFacade, plugins: ReportingSetupDeps, handler: HandlerFunction, @@ -31,7 +30,7 @@ export function registerGenerateFromJobParams( ) { const getRouteConfig = () => { const getOriginalRouteConfig: GetRouteConfigFactoryFn = getRouteConfigFactoryReportingPre( - config, + server, plugins, logger ); diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts index 23ab7ee0d9e6b..415b6b7d64366 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts @@ -9,7 +9,7 @@ import { get } from 'lodash'; import { API_BASE_GENERATE_V1, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../common/constants'; import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingConfig, ReportingSetupDeps } from '../types'; +import { ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } from './types'; @@ -24,14 +24,13 @@ import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } from './types * - local (transient) changes the user made to the saved object */ export function registerGenerateCsvFromSavedObject( - config: ReportingConfig, server: ServerFacade, plugins: ReportingSetupDeps, handleRoute: HandlerFunction, handleRouteError: HandlerErrorFunction, logger: Logger ) { - const routeOptions = getRouteOptionsCsv(config, plugins, logger); + const routeOptions = getRouteOptionsCsv(server, plugins, logger); server.route({ path: `${API_BASE_GENERATE_V1}/csv/saved-object/{savedObjectType}:{savedObjectId}`, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index 5bd07aa6049ed..5d17fa2e82b8c 100644 --- 
a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -16,7 +16,7 @@ import { ResponseFacade, ServerFacade, } from '../../types'; -import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; @@ -31,12 +31,12 @@ import { getRouteOptionsCsv } from './lib/route_config_factories'; */ export function registerGenerateCsvFromSavedObjectImmediate( reporting: ReportingCore, - config: ReportingConfig, server: ServerFacade, plugins: ReportingSetupDeps, parentLogger: Logger ) { - const routeOptions = getRouteOptionsCsv(config, plugins, parentLogger); + const routeOptions = getRouteOptionsCsv(server, plugins, parentLogger); + const { elasticsearch } = plugins; /* * CSV export with the `immediate` option does not queue a job with Reporting's ESQueue to run the job async. Instead, this does: @@ -52,10 +52,14 @@ export function registerGenerateCsvFromSavedObjectImmediate( const logger = parentLogger.clone(['savedobject-csv']); const jobParams = getJobParamsFromRequest(request, { isImmediate: true }); - const [createJobFn, executeJobFn] = await Promise.all([ - createJobFactory(reporting, logger), - executeJobFactory(reporting, logger), - ]); + /* TODO these functions should be made available in the export types registry: + * + * const { createJobFn, executeJobFn } = exportTypesRegistry.getById(CSV_FROM_SAVEDOBJECT_JOB_TYPE) + * + * Calling an execute job factory requires passing a browserDriverFactory option, so we should not call the factory from here + */ + const createJobFn = createJobFactory(reporting, server, elasticsearch, logger); + const executeJobFn = await executeJobFactory(reporting, server, elasticsearch, logger); const jobDocPayload: JobDocPayloadPanelCsv = await createJobFn( jobParams, request.headers, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts index 44a98dac2d4a9..54d9671692c5d 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts @@ -7,7 +7,7 @@ import Hapi from 'hapi'; import { createMockReportingCore } from '../../test_helpers'; import { Logger, ServerFacade } from '../../types'; -import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../../server/types'; jest.mock('./lib/authorized_user_pre_routing', () => ({ authorizedUserPreRoutingFactory: () => () => ({}), @@ -22,8 +22,6 @@ import { registerJobGenerationRoutes } from './generation'; let mockServer: Hapi.Server; let mockReportingPlugin: ReportingCore; -let mockReportingConfig: ReportingConfig; - const mockLogger = ({ error: jest.fn(), debug: jest.fn(), @@ -35,12 +33,10 @@ beforeEach(async () => { port: 8080, routes: { log: { collect: true } }, }); - + mockServer.config = () => ({ get: jest.fn(), has: jest.fn() }); mockReportingPlugin = await createMockReportingCore(); mockReportingPlugin.getEnqueueJob = async () => jest.fn().mockImplementation(() => ({ toJSON: () => '{ "job": "data" }' })); - - mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } }; }); const mockPlugins = { @@ -58,7 +54,6 @@ const getErrorsFromRequest 
= (request: Hapi.Request) => { test(`returns 400 if there are no job params`, async () => { registerJobGenerationRoutes( mockReportingPlugin, - (mockReportingConfig as unknown) as ReportingConfig, (mockServer as unknown) as ServerFacade, (mockPlugins as unknown) as ReportingSetupDeps, mockLogger @@ -85,7 +80,6 @@ test(`returns 400 if there are no job params`, async () => { test(`returns 400 if job params is invalid`, async () => { registerJobGenerationRoutes( mockReportingPlugin, - (mockReportingConfig as unknown) as ReportingConfig, (mockServer as unknown) as ServerFacade, (mockPlugins as unknown) as ReportingSetupDeps, mockLogger @@ -120,7 +114,6 @@ test(`returns 500 if job handler throws an error`, async () => { registerJobGenerationRoutes( mockReportingPlugin, - (mockReportingConfig as unknown) as ReportingConfig, (mockServer as unknown) as ServerFacade, (mockPlugins as unknown) as ReportingSetupDeps, mockLogger diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.ts index 0ac6a34dd75bb..096ba84b63d1a 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.ts @@ -9,7 +9,7 @@ import { errors as elasticsearchErrors } from 'elasticsearch'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { registerGenerateFromJobParams } from './generate_from_jobparams'; import { registerGenerateCsvFromSavedObject } from './generate_from_savedobject'; import { registerGenerateCsvFromSavedObjectImmediate } from './generate_from_savedobject_immediate'; @@ -19,13 +19,12 @@ const esErrors = elasticsearchErrors as Record; export function registerJobGenerationRoutes( reporting: ReportingCore, - config: ReportingConfig, server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ) { - const DOWNLOAD_BASE_URL = - `${config.kbnConfig.get('server', 'basePath')}` + `${API_BASE_URL}/jobs/download`; + const config = server.config(); + const DOWNLOAD_BASE_URL = config.get('server.basePath') + `${API_BASE_URL}/jobs/download`; /* * Generates enqueued job details to use in responses @@ -67,11 +66,11 @@ export function registerJobGenerationRoutes( return err; } - registerGenerateFromJobParams(config, server, plugins, handler, handleError, logger); + registerGenerateFromJobParams(server, plugins, handler, handleError, logger); // Register beta panel-action download-related API's - if (config.get('csv', 'enablePanelActionDownload')) { - registerGenerateCsvFromSavedObject(config, server, plugins, handler, handleError, logger); - registerGenerateCsvFromSavedObjectImmediate(reporting, config, server, plugins, logger); + if (config.get('xpack.reporting.csv.enablePanelActionDownload')) { + registerGenerateCsvFromSavedObject(server, plugins, handler, handleError, logger); + registerGenerateCsvFromSavedObjectImmediate(reporting, server, plugins, logger); } } diff --git a/x-pack/legacy/plugins/reporting/server/routes/index.ts b/x-pack/legacy/plugins/reporting/server/routes/index.ts index 21eeb901d9b96..610ab4907d369 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/index.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/index.ts @@ -5,17 +5,16 @@ */ import { Logger, ServerFacade } from 
'../../types'; -import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { registerJobGenerationRoutes } from './generation'; import { registerJobInfoRoutes } from './jobs'; export function registerRoutes( reporting: ReportingCore, - config: ReportingConfig, server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ) { - registerJobGenerationRoutes(reporting, config, server, plugins, logger); - registerJobInfoRoutes(reporting, config, server, plugins, logger); + registerJobGenerationRoutes(reporting, server, plugins, logger); + registerJobInfoRoutes(reporting, server, plugins, logger); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js index b12aa44487523..071b401d2321b 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js @@ -5,6 +5,7 @@ */ import Hapi from 'hapi'; +import { memoize } from 'lodash'; import { createMockReportingCore } from '../../test_helpers'; import { ExportTypesRegistry } from '../lib/export_types_registry'; @@ -22,7 +23,6 @@ import { registerJobInfoRoutes } from './jobs'; let mockServer; let exportTypesRegistry; let mockReportingPlugin; -let mockReportingConfig; const mockLogger = { error: jest.fn(), debug: jest.fn(), @@ -30,6 +30,7 @@ const mockLogger = { beforeEach(async () => { mockServer = new Hapi.Server({ debug: false, port: 8080, routes: { log: { collect: true } } }); + mockServer.config = memoize(() => ({ get: jest.fn() })); exportTypesRegistry = new ExportTypesRegistry(); exportTypesRegistry.register({ id: 'unencoded', @@ -42,11 +43,8 @@ beforeEach(async () => { jobContentEncoding: 'base64', jobContentExtension: 'pdf', }); - mockReportingPlugin = await createMockReportingCore(); mockReportingPlugin.getExportTypesRegistry = () => exportTypesRegistry; - - mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } }; }); const mockPlugins = { @@ -72,13 +70,7 @@ test(`returns 404 if job not found`, async () => { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())), }; - registerJobInfoRoutes( - mockReportingPlugin, - mockReportingConfig, - mockServer, - mockPlugins, - mockLogger - ); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -97,13 +89,7 @@ test(`returns 401 if not valid job type`, async () => { .mockReturnValue(Promise.resolve(getHits({ jobtype: 'invalidJobType' }))), }; - registerJobInfoRoutes( - mockReportingPlugin, - mockReportingConfig, - mockServer, - mockPlugins, - mockLogger - ); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -124,13 +110,7 @@ describe(`when job is incomplete`, () => { ), }; - registerJobInfoRoutes( - mockReportingPlugin, - mockReportingConfig, - mockServer, - mockPlugins, - mockLogger - ); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -172,13 +152,7 @@ describe(`when job is failed`, () => { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), }; - registerJobInfoRoutes( - mockReportingPlugin, - mockReportingConfig, - mockServer, - mockPlugins, - mockLogger - ); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -223,13 +197,7 @@ describe(`when 
job is completed`, () => { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), }; - registerJobInfoRoutes( - mockReportingPlugin, - mockReportingConfig, - mockServer, - mockPlugins, - mockLogger - ); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index 4f29e561431fa..b9aa75e0ddd00 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -17,7 +17,7 @@ import { ServerFacade, } from '../../types'; import { jobsQueryFactory } from '../lib/jobs_query'; -import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { deleteJobResponseHandlerFactory, downloadJobResponseHandlerFactory, @@ -37,14 +37,13 @@ function isResponse(response: Boom | ResponseObject): response is Response export function registerJobInfoRoutes( reporting: ReportingCore, - config: ReportingConfig, server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ) { const { elasticsearch } = plugins; - const jobsQuery = jobsQueryFactory(config, elasticsearch); - const getRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); + const jobsQuery = jobsQueryFactory(server, elasticsearch); + const getRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); // list jobs in the queue, paginated server.route({ @@ -142,8 +141,8 @@ export function registerJobInfoRoutes( // trigger a download of the output from a job const exportTypesRegistry = reporting.getExportTypesRegistry(); - const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(config, plugins, logger); - const downloadResponseHandler = downloadJobResponseHandlerFactory(config, elasticsearch, exportTypesRegistry); // prettier-ignore + const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(server, plugins, logger); + const downloadResponseHandler = downloadJobResponseHandlerFactory(server, elasticsearch, exportTypesRegistry); // prettier-ignore server.route({ path: `${MAIN_ENTRY}/download/{docId}`, method: 'GET', @@ -182,8 +181,8 @@ export function registerJobInfoRoutes( }); // allow a report to be deleted - const getRouteConfigDelete = getRouteConfigFactoryDeletePre(config, plugins, logger); - const deleteResponseHandler = deleteJobResponseHandlerFactory(config, elasticsearch); + const getRouteConfigDelete = getRouteConfigFactoryDeletePre(server, plugins, logger); + const deleteResponseHandler = deleteJobResponseHandlerFactory(server, elasticsearch); server.route({ path: `${MAIN_ENTRY}/delete/{docId}`, method: 'DELETE', diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js index b5d6ae59ce5dd..3460d22592e3d 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js @@ -7,48 +7,56 @@ import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; describe('authorized_user_pre_routing', function() { - const createMockConfig = (mockConfig = {}) => { - return { - get: (...keys) => mockConfig[keys.join('.')], - kbnConfig: { get: (...keys) => mockConfig[keys.join('.')] }, - }; - }; - const 
createMockPlugins = (function() { + // the getClientShield is using `once` which forces us to use a constant mock + // which makes testing anything that is dependent on `oncePerServer` confusing. + // so createMockServer reuses the same 'instance' of the server and overwrites + // the properties to contain different values + const createMockServer = (function() { const getUserStub = jest.fn(); + let mockConfig; + + const mockServer = { + expose() {}, + config() { + return { + get(key) { + return mockConfig[key]; + }, + }; + }, + log: function() {}, + plugins: { + xpack_main: {}, + security: { getUser: getUserStub }, + }, + }; return function({ securityEnabled = true, xpackInfoUndefined = false, xpackInfoAvailable = true, - getCurrentUser = undefined, user = undefined, + config = {}, }) { - getUserStub.mockReset(); - getUserStub.mockResolvedValue(user); - return { - security: securityEnabled - ? { - authc: { getCurrentUser }, - } - : null, - __LEGACY: { - plugins: { - xpack_main: { - info: !xpackInfoUndefined && { + mockConfig = config; + + mockServer.plugins.xpack_main = { + info: !xpackInfoUndefined && { + isAvailable: () => xpackInfoAvailable, + feature(featureName) { + if (featureName === 'security') { + return { + isEnabled: () => securityEnabled, isAvailable: () => xpackInfoAvailable, - feature(featureName) { - if (featureName === 'security') { - return { - isEnabled: () => securityEnabled, - isAvailable: () => xpackInfoAvailable, - }; - } - }, - }, - }, + }; + } }, }, }; + + getUserStub.mockReset(); + getUserStub.mockResolvedValue(user); + return mockServer; }; })(); @@ -67,6 +75,10 @@ describe('authorized_user_pre_routing', function() { raw: { req: mockRequestRaw }, }); + const getMockPlugins = pluginSet => { + return pluginSet || { security: null }; + }; + const getMockLogger = () => ({ warn: jest.fn(), error: msg => { @@ -75,9 +87,11 @@ describe('authorized_user_pre_routing', function() { }); it('should return with boom notFound when xpackInfo is undefined', async function() { + const mockServer = createMockServer({ xpackInfoUndefined: true }); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - createMockConfig(), - createMockPlugins({ xpackInfoUndefined: true }), + mockServer, + getMockPlugins(), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -86,9 +100,11 @@ describe('authorized_user_pre_routing', function() { }); it(`should return with boom notFound when xpackInfo isn't available`, async function() { + const mockServer = createMockServer({ xpackInfoAvailable: false }); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - createMockConfig(), - createMockPlugins({ xpackInfoAvailable: false }), + mockServer, + getMockPlugins(), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -97,9 +113,11 @@ describe('authorized_user_pre_routing', function() { }); it('should return with null user when security is disabled in Elasticsearch', async function() { + const mockServer = createMockServer({ securityEnabled: false }); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - createMockConfig(), - createMockPlugins({ securityEnabled: false }), + mockServer, + getMockPlugins(), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -107,14 +125,16 @@ describe('authorized_user_pre_routing', function() { }); it('should return with boom unauthenticated when security is enabled but no authenticated user', async function() { - const 
mockPlugins = createMockPlugins({ + const mockServer = createMockServer({ user: null, config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, }); - mockPlugins.security = { authc: { getCurrentUser: () => null } }; + const mockPlugins = getMockPlugins({ + security: { authc: { getCurrentUser: () => null } }, + }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - createMockConfig(), + mockServer, mockPlugins, getMockLogger() ); @@ -124,14 +144,16 @@ describe('authorized_user_pre_routing', function() { }); it(`should return with boom forbidden when security is enabled but user doesn't have allowed role`, async function() { - const mockConfig = createMockConfig({ 'roles.allow': ['.reporting_user'] }); - const mockPlugins = createMockPlugins({ + const mockServer = createMockServer({ user: { roles: [] }, - getCurrentUser: () => ({ roles: ['something_else'] }), + config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, + }); + const mockPlugins = getMockPlugins({ + security: { authc: { getCurrentUser: () => ({ roles: ['something_else'] }) } }, }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockConfig, + mockServer, mockPlugins, getMockLogger() ); @@ -142,14 +164,18 @@ describe('authorized_user_pre_routing', function() { it('should return with user when security is enabled and user has explicitly allowed role', async function() { const user = { roles: ['.reporting_user', 'something_else'] }; - const mockConfig = createMockConfig({ 'roles.allow': ['.reporting_user'] }); - const mockPlugins = createMockPlugins({ + const mockServer = createMockServer({ user, - getCurrentUser: () => ({ roles: ['.reporting_user', 'something_else'] }), + config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, + }); + const mockPlugins = getMockPlugins({ + security: { + authc: { getCurrentUser: () => ({ roles: ['.reporting_user', 'something_else'] }) }, + }, }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockConfig, + mockServer, mockPlugins, getMockLogger() ); @@ -159,13 +185,16 @@ describe('authorized_user_pre_routing', function() { it('should return with user when security is enabled and user has superuser role', async function() { const user = { roles: ['superuser', 'something_else'] }; - const mockConfig = createMockConfig({ 'roles.allow': [] }); - const mockPlugins = createMockPlugins({ - getCurrentUser: () => ({ roles: ['superuser', 'something_else'] }), + const mockServer = createMockServer({ + user, + config: { 'xpack.reporting.roles.allow': [] }, + }); + const mockPlugins = getMockPlugins({ + security: { authc: { getCurrentUser: () => ({ roles: ['superuser', 'something_else'] }) } }, }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockConfig, + mockServer, mockPlugins, getMockLogger() ); diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts index 1ca28ca62a7f2..c5f8c78016f61 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts @@ -7,8 +7,7 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; import { AuthenticatedUser } from '../../../../../../plugins/security/server'; -import { ReportingConfig } from '../../../server'; -import { Logger } from '../../../types'; +import { Logger, ServerFacade } from '../../../types'; import { getUserFactory } 
from '../../lib/get_user'; import { ReportingSetupDeps } from '../../types'; @@ -19,14 +18,16 @@ export type PreRoutingFunction = ( ) => Promise | AuthenticatedUser | null>; export const authorizedUserPreRoutingFactory = function authorizedUserPreRoutingFn( - config: ReportingConfig, + server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ) { - const getUser = getUserFactory(plugins.security, logger); - const { info: xpackInfo } = plugins.__LEGACY.plugins.xpack_main; + const getUser = getUserFactory(server, plugins.security); + const config = server.config(); return async function authorizedUserPreRouting(request: Legacy.Request) { + const xpackInfo = server.plugins.xpack_main.info; + if (!xpackInfo || !xpackInfo.isAvailable()) { logger.warn('Unable to authorize user before xpack info is available.', [ 'authorizedUserPreRouting', @@ -45,7 +46,10 @@ export const authorizedUserPreRoutingFactory = function authorizedUserPreRouting return Boom.unauthorized(`Sorry, you aren't authenticated`); } - const authorizedRoles = [superuserRole, ...(config.get('roles', 'allow') as string[])]; + const authorizedRoles = [ + superuserRole, + ...(config.get('xpack.reporting.roles.allow') as string[]), + ]; if (!user.roles.find(role => authorizedRoles.includes(role))) { return Boom.forbidden(`Sorry, you don't have access to Reporting`); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts index aef37754681ec..fb3944ea33552 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts @@ -8,7 +8,13 @@ import contentDisposition from 'content-disposition'; import * as _ from 'lodash'; import { CSV_JOB_TYPE } from '../../../common/constants'; -import { ExportTypeDefinition, ExportTypesRegistry, JobDocOutput, JobSource } from '../../../types'; +import { + ExportTypeDefinition, + ExportTypesRegistry, + JobDocOutput, + JobSource, + ServerFacade, +} from '../../../types'; interface ICustomHeaders { [x: string]: any; @@ -16,15 +22,9 @@ interface ICustomHeaders { type ExportTypeType = ExportTypeDefinition; -interface ErrorFromPayload { - message: string; - reason: string | null; -} - -// A camelCase version of JobDocOutput interface Payload { statusCode: number; - content: string | Buffer | ErrorFromPayload; + content: any; contentType: string; headers: Record; } @@ -48,17 +48,20 @@ const getReportingHeaders = (output: JobDocOutput, exportType: ExportTypeType) = return metaDataHeaders; }; -export function getDocumentPayloadFactory(exportTypesRegistry: ExportTypesRegistry) { - function encodeContent(content: string | null, exportType: ExportTypeType): Buffer | string { +export function getDocumentPayloadFactory( + server: ServerFacade, + exportTypesRegistry: ExportTypesRegistry +) { + function encodeContent(content: string | null, exportType: ExportTypeType) { switch (exportType.jobContentEncoding) { case 'base64': - return content ? Buffer.from(content, 'base64') : ''; // convert null to empty string + return content ? Buffer.from(content, 'base64') : content; // Buffer.from rejects null default: - return content ? 
content : ''; // convert null to empty string + return content; } } - function getCompleted(output: JobDocOutput, jobType: string, title: string): Payload { + function getCompleted(output: JobDocOutput, jobType: string, title: string) { const exportType = exportTypesRegistry.get((item: ExportTypeType) => item.jobType === jobType); const filename = getTitle(exportType, title); const headers = getReportingHeaders(output, exportType); @@ -74,7 +77,7 @@ export function getDocumentPayloadFactory(exportTypesRegistry: ExportTypesRegist }; } - function getFailure(output: JobDocOutput): Payload { + function getFailure(output: JobDocOutput) { return { statusCode: 500, content: { diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts index e7e7c866db96a..30627d5b23230 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts @@ -5,12 +5,11 @@ */ import Boom from 'boom'; -import { ResponseToolkit } from 'hapi'; import { ElasticsearchServiceSetup } from 'kibana/server'; +import { ResponseToolkit } from 'hapi'; import { WHITELISTED_JOB_CONTENT_TYPES } from '../../../common/constants'; -import { ExportTypesRegistry } from '../../../types'; +import { ExportTypesRegistry, ServerFacade } from '../../../types'; import { jobsQueryFactory } from '../../lib/jobs_query'; -import { ReportingConfig } from '../../types'; import { getDocumentPayloadFactory } from './get_document_payload'; interface JobResponseHandlerParams { @@ -22,12 +21,12 @@ interface JobResponseHandlerOpts { } export function downloadJobResponseHandlerFactory( - config: ReportingConfig, + server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, exportTypesRegistry: ExportTypesRegistry ) { - const jobsQuery = jobsQueryFactory(config, elasticsearch); - const getDocumentPayload = getDocumentPayloadFactory(exportTypesRegistry); + const jobsQuery = jobsQueryFactory(server, elasticsearch); + const getDocumentPayload = getDocumentPayloadFactory(server, exportTypesRegistry); return function jobResponseHandler( validJobTypes: string[], @@ -71,10 +70,10 @@ export function downloadJobResponseHandlerFactory( } export function deleteJobResponseHandlerFactory( - config: ReportingConfig, + server: ServerFacade, elasticsearch: ElasticsearchServiceSetup ) { - const jobsQuery = jobsQueryFactory(config, elasticsearch); + const jobsQuery = jobsQueryFactory(server, elasticsearch); return async function deleteJobResponseHander( validJobTypes: string[], diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts index 8a79566aafae2..9e618ff1fe40a 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts @@ -6,17 +6,17 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; -import { Logger } from '../../../types'; -import { ReportingConfig, ReportingSetupDeps } from '../../types'; +import { Logger, ServerFacade } from '../../../types'; +import { ReportingSetupDeps } from '../../types'; export type GetReportingFeatureIdFn = (request: Legacy.Request) => string; export const reportingFeaturePreRoutingFactory = function reportingFeaturePreRoutingFn( - config: ReportingConfig, + server: ServerFacade, 
plugins: ReportingSetupDeps, logger: Logger ) { - const xpackMainPlugin = plugins.__LEGACY.plugins.xpack_main; + const xpackMainPlugin = server.plugins.xpack_main; const pluginId = 'reporting'; // License checking and enable/disable logic diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts index 06f7efaa9dcbb..3d275d34e2f7d 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts @@ -6,8 +6,8 @@ import Joi from 'joi'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; -import { Logger } from '../../../types'; -import { ReportingConfig, ReportingSetupDeps } from '../../types'; +import { Logger, ServerFacade } from '../../../types'; +import { ReportingSetupDeps } from '../../types'; import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; import { GetReportingFeatureIdFn, @@ -29,12 +29,12 @@ export type GetRouteConfigFactoryFn = ( ) => RouteConfigFactory; export function getRouteConfigFactoryReportingPre( - config: ReportingConfig, + server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(config, plugins, logger); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(config, plugins, logger); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, plugins, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, plugins, logger); return (getFeatureId?: GetReportingFeatureIdFn): RouteConfigFactory => { const preRouting: any[] = [{ method: authorizedUserPreRouting, assign: 'user' }]; @@ -50,11 +50,11 @@ export function getRouteConfigFactoryReportingPre( } export function getRouteOptionsCsv( - config: ReportingConfig, + server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ) { - const getRouteConfig = getRouteConfigFactoryReportingPre(config, plugins, logger); + const getRouteConfig = getRouteConfigFactoryReportingPre(server, plugins, logger); return { ...getRouteConfig(() => CSV_FROM_SAVEDOBJECT_JOB_TYPE), validate: { @@ -75,12 +75,12 @@ export function getRouteOptionsCsv( } export function getRouteConfigFactoryManagementPre( - config: ReportingConfig, + server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(config, plugins, logger); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(config, plugins, logger); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, plugins, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, plugins, logger); const managementPreRouting = reportingFeaturePreRouting(() => 'management'); return (): RouteConfigFactory => { @@ -99,11 +99,11 @@ export function getRouteConfigFactoryManagementPre( // Additionally, the range-request doesn't alleviate any performance issues on the server as the entire // download is loaded into memory. 
export function getRouteConfigFactoryDownloadPre( - config: ReportingConfig, + server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const getManagementRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); + const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); return (): RouteConfigFactory => ({ ...getManagementRouteConfig(), tags: [API_TAG, 'download'], @@ -114,11 +114,11 @@ export function getRouteConfigFactoryDownloadPre( } export function getRouteConfigFactoryDeletePre( - config: ReportingConfig, + server: ServerFacade, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const getManagementRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); + const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); return (): RouteConfigFactory => ({ ...getManagementRouteConfig(), tags: [API_TAG, 'delete'], diff --git a/x-pack/legacy/plugins/reporting/server/types.d.ts b/x-pack/legacy/plugins/reporting/server/types.d.ts index c773e2d556648..59b7bc2020ad9 100644 --- a/x-pack/legacy/plugins/reporting/server/types.d.ts +++ b/x-pack/legacy/plugins/reporting/server/types.d.ts @@ -11,17 +11,16 @@ import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/ import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; import { ReportingPluginSpecOptions } from '../types'; -import { ReportingConfig, ReportingConfigType } from './core'; export interface ReportingSetupDeps { elasticsearch: ElasticsearchServiceSetup; security: SecurityPluginSetup; usageCollection: UsageCollectionSetup; - reporting: { config: ReportingConfig }; __LEGACY: LegacySetup; } export interface ReportingStartDeps { + elasticsearch: ElasticsearchServiceSetup; data: DataPluginStart; __LEGACY: LegacySetup; } @@ -31,7 +30,10 @@ export type ReportingSetup = object; export type ReportingStart = object; export interface LegacySetup { + config: Legacy.Server['config']; + info: Legacy.Server['info']; plugins: { + elasticsearch: Legacy.Server['plugins']['elasticsearch']; xpack_main: XPackMainPlugin & { status?: any; }; @@ -40,7 +42,4 @@ export interface LegacySetup { route: Legacy.Server['route']; } -export { ReportingConfig, ReportingCore } from './core'; - -export type CaptureConfig = ReportingConfigType['capture']; -export type ScrollConfig = ReportingConfigType['csv']['scroll']; +export { ReportingCore } from './core'; diff --git a/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts b/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts index 5f12f2b7f044d..bd2d0cb835a79 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts @@ -5,10 +5,7 @@ */ import { get } from 'lodash'; -import { ESCallCluster, ExportTypesRegistry } from '../../types'; -import { ReportingConfig, ReportingSetupDeps } from '../types'; -import { decorateRangeStats } from './decorate_range_stats'; -import { getExportTypesHandler } from './get_export_type_handler'; +import { ServerFacade, ExportTypesRegistry, ESCallCluster } from '../../types'; import { AggregationBuckets, AggregationResults, @@ -18,6 +15,8 @@ import { RangeAggregationResults, RangeStats, } from './types'; +import { decorateRangeStats } from './decorate_range_stats'; +import { getExportTypesHandler 
} from './get_export_type_handler'; const JOB_TYPES_KEY = 'jobTypes'; const JOB_TYPES_FIELD = 'jobtype'; @@ -80,7 +79,10 @@ type RangeStatSets = Partial< last7Days: RangeStats; } >; -async function handleResponse(response: AggregationResults): Promise { +async function handleResponse( + server: ServerFacade, + response: AggregationResults +): Promise { const buckets = get(response, 'aggregations.ranges.buckets'); if (!buckets) { return {}; @@ -99,12 +101,12 @@ async function handleResponse(response: AggregationResults): Promise handleResponse(response)) + .then((response: AggregationResults) => handleResponse(server, response)) .then((usage: RangeStatSets) => { // Allow this to explicitly throw an exception if/when this config is deprecated, // because we shouldn't collect browserType in that case! - const browserType = config.get('capture', 'browser', 'type'); + const browserType = config.get('xpack.reporting.capture.browser.type'); + const xpackInfo = server.plugins.xpack_main.info; const exportTypesHandler = getExportTypesHandler(exportTypesRegistry); - const availability = exportTypesHandler.getAvailability( - xpackMainInfo - ) as FeatureAvailabilityMap; + const availability = exportTypesHandler.getAvailability(xpackInfo) as FeatureAvailabilityMap; const { lastDay, last7Days, ...all } = usage; diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js index 905d2fe9b995c..a6d753f9b107a 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js @@ -24,52 +24,62 @@ function getMockUsageCollection() { makeUsageCollector: options => { return new MockUsageCollector(this, options); }, - registerCollector: sinon.stub(), }; } -function getPluginsMock( - { license, usageCollection = getMockUsageCollection() } = { license: 'platinum' } -) { - const mockXpackMain = { - info: { - isAvailable: sinon.stub().returns(true), - feature: () => ({ - getLicenseCheckResults: sinon.stub(), - }), - license: { - isOneOf: sinon.stub().returns(false), - getType: sinon.stub().returns(license), +function getServerMock(customization) { + const getLicenseCheckResults = sinon.stub().returns({}); + const defaultServerMock = { + plugins: { + security: { + isAuthenticated: sinon.stub().returns(true), }, - toJSON: () => ({ b: 1 }), - }, - }; - return { - usageCollection, - __LEGACY: { - plugins: { - xpack_main: mockXpackMain, + xpack_main: { + info: { + isAvailable: sinon.stub().returns(true), + feature: () => ({ + getLicenseCheckResults, + }), + license: { + isOneOf: sinon.stub().returns(false), + getType: sinon.stub().returns('platinum'), + }, + toJSON: () => ({ b: 1 }), + }, }, }, + log: () => {}, + config: () => ({ + get: key => { + if (key === 'xpack.reporting.enabled') { + return true; + } else if (key === 'xpack.reporting.index') { + return '.reporting-index'; + } + }, + }), }; + return Object.assign(defaultServerMock, customization); } const getResponseMock = (customization = {}) => customization; describe('license checks', () => { - let mockConfig; - beforeAll(async () => { - const mockReporting = await createMockReportingCore(); - mockConfig = await mockReporting.getConfig(); - }); - describe('with a basic license', () => { let usageStats; beforeAll(async () => { - const plugins = getPluginsMock({ license: 'basic' }); + const serverWithBasicLicenseMock = getServerMock(); + 
serverWithBasicLicenseMock.plugins.xpack_main.info.license.getType = sinon + .stub() + .returns('basic'); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const { fetch } = getReportingUsageCollector(mockConfig, plugins, exportTypesRegistry); - usageStats = await fetch(callClusterMock, exportTypesRegistry); + const usageCollection = getMockUsageCollection(); + const { fetch: getReportingUsage } = getReportingUsageCollector( + serverWithBasicLicenseMock, + usageCollection, + exportTypesRegistry + ); + usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -88,10 +98,18 @@ describe('license checks', () => { describe('with no license', () => { let usageStats; beforeAll(async () => { - const plugins = getPluginsMock({ license: 'none' }); + const serverWithNoLicenseMock = getServerMock(); + serverWithNoLicenseMock.plugins.xpack_main.info.license.getType = sinon + .stub() + .returns('none'); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const { fetch } = getReportingUsageCollector(mockConfig, plugins, exportTypesRegistry); - usageStats = await fetch(callClusterMock, exportTypesRegistry); + const usageCollection = getMockUsageCollection(); + const { fetch: getReportingUsage } = getReportingUsageCollector( + serverWithNoLicenseMock, + usageCollection, + exportTypesRegistry + ); + usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -110,10 +128,18 @@ describe('license checks', () => { describe('with platinum license', () => { let usageStats; beforeAll(async () => { - const plugins = getPluginsMock({ license: 'platinum' }); + const serverWithPlatinumLicenseMock = getServerMock(); + serverWithPlatinumLicenseMock.plugins.xpack_main.info.license.getType = sinon + .stub() + .returns('platinum'); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const { fetch } = getReportingUsageCollector(mockConfig, plugins, exportTypesRegistry); - usageStats = await fetch(callClusterMock, exportTypesRegistry); + const usageCollection = getMockUsageCollection(); + const { fetch: getReportingUsage } = getReportingUsageCollector( + serverWithPlatinumLicenseMock, + usageCollection, + exportTypesRegistry + ); + usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -132,10 +158,18 @@ describe('license checks', () => { describe('with no usage data', () => { let usageStats; beforeAll(async () => { - const plugins = getPluginsMock({ license: 'basic' }); + const serverWithBasicLicenseMock = getServerMock(); + serverWithBasicLicenseMock.plugins.xpack_main.info.license.getType = sinon + .stub() + .returns('basic'); const callClusterMock = jest.fn(() => Promise.resolve({})); - const { fetch } = getReportingUsageCollector(mockConfig, plugins, exportTypesRegistry); - usageStats = await fetch(callClusterMock, exportTypesRegistry); + const usageCollection = getMockUsageCollection(); + const { fetch: getReportingUsage } = getReportingUsageCollector( + serverWithBasicLicenseMock, + usageCollection, + exportTypesRegistry + ); + usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -149,11 +183,21 @@ describe('license checks', () => { }); describe('data modeling', () => { + let getReportingUsage; + beforeAll(async () => { + const usageCollection = 
getMockUsageCollection(); + const serverWithPlatinumLicenseMock = getServerMock(); + serverWithPlatinumLicenseMock.plugins.xpack_main.info.license.getType = sinon + .stub() + .returns('platinum'); + ({ fetch: getReportingUsage } = getReportingUsageCollector( + serverWithPlatinumLicenseMock, + usageCollection, + exportTypesRegistry + )); + }); + test('with normal looking usage data', async () => { - const mockReporting = await createMockReportingCore(); - const mockConfig = await mockReporting.getConfig(); - const plugins = getPluginsMock(); - const { fetch } = getReportingUsageCollector(mockConfig, plugins, exportTypesRegistry); const callClusterMock = jest.fn(() => Promise.resolve( getResponseMock({ @@ -276,7 +320,7 @@ describe('data modeling', () => { ) ); - const usageStats = await fetch(callClusterMock); + const usageStats = await getReportingUsage(callClusterMock); expect(usageStats).toMatchInlineSnapshot(` Object { "PNG": Object { @@ -371,16 +415,20 @@ describe('data modeling', () => { }); describe('Ready for collection observable', () => { - test('converts observable to promise', async () => { - const mockReporting = await createMockReportingCore(); - const mockConfig = await mockReporting.getConfig(); + let mockReporting; - const usageCollection = getMockUsageCollection(); - const makeCollectorSpy = sinon.spy(); - usageCollection.makeUsageCollector = makeCollectorSpy; + beforeEach(async () => { + mockReporting = await createMockReportingCore(); + }); - const plugins = getPluginsMock({ usageCollection }); - registerReportingUsageCollector(mockReporting, mockConfig, plugins); + test('converts observable to promise', async () => { + const serverWithBasicLicenseMock = getServerMock(); + const makeCollectorSpy = sinon.spy(); + const usageCollection = { + makeUsageCollector: makeCollectorSpy, + registerCollector: sinon.stub(), + }; + registerReportingUsageCollector(mockReporting, serverWithBasicLicenseMock, usageCollection); const [args] = makeCollectorSpy.firstCall.args; expect(args).toMatchInlineSnapshot(` diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts index ab4ec3a0edf57..14202530fb6c7 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts @@ -4,9 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; import { KIBANA_REPORTING_TYPE } from '../../common/constants'; -import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../../server/types'; -import { ESCallCluster, ExportTypesRegistry } from '../../types'; +import { ReportingCore } from '../../server'; +import { ESCallCluster, ExportTypesRegistry, ServerFacade } from '../../types'; import { getReportingUsage } from './get_reporting_usage'; import { RangeStats } from './types'; @@ -14,19 +15,19 @@ import { RangeStats } from './types'; const METATYPE = 'kibana_stats'; /* + * @param {Object} server * @return {Object} kibana usage stats type collection object */ export function getReportingUsageCollector( - config: ReportingConfig, - plugins: ReportingSetupDeps, + server: ServerFacade, + usageCollection: UsageCollectionSetup, exportTypesRegistry: ExportTypesRegistry, isReady: () => Promise ) { - const { usageCollection } = plugins; return usageCollection.makeUsageCollector({ type: KIBANA_REPORTING_TYPE, fetch: (callCluster: ESCallCluster) => - getReportingUsage(config, plugins, callCluster, exportTypesRegistry), + getReportingUsage(server, callCluster, exportTypesRegistry), isReady, /* @@ -51,17 +52,17 @@ export function getReportingUsageCollector( export function registerReportingUsageCollector( reporting: ReportingCore, - config: ReportingConfig, - plugins: ReportingSetupDeps + server: ServerFacade, + usageCollection: UsageCollectionSetup ) { const exportTypesRegistry = reporting.getExportTypesRegistry(); const collectionIsReady = reporting.pluginHasStarted.bind(reporting); const collector = getReportingUsageCollector( - config, - plugins, + server, + usageCollection, exportTypesRegistry, collectionIsReady ); - plugins.usageCollection.registerCollector(collector); + usageCollection.registerCollector(collector); } diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts index 930aa7601b8cb..883276d43e27e 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts @@ -10,8 +10,7 @@ import * as contexts from '../export_types/common/lib/screenshots/constants'; import { ElementsPositionAndAttribute } from '../export_types/common/lib/screenshots/types'; import { HeadlessChromiumDriver, HeadlessChromiumDriverFactory } from '../server/browsers'; import { createDriverFactory } from '../server/browsers/chromium'; -import { CaptureConfig } from '../server/types'; -import { Logger } from '../types'; +import { BrowserConfig, CaptureConfig, Logger } from '../types'; interface CreateMockBrowserDriverFactoryOpts { evaluate: jest.Mock, any[]>; @@ -94,34 +93,24 @@ export const createMockBrowserDriverFactory = async ( logger: Logger, opts: Partial ): Promise => { - const captureConfig = { - timeouts: { openUrl: 30000, waitForElements: 30000, renderComplete: 30000 }, - browser: { - type: 'chromium', - chromium: { - inspect: false, - disableSandbox: false, - userDataDir: '/usr/data/dir', - viewport: { width: 12, height: 12 }, - proxy: { enabled: false, server: undefined, bypass: undefined }, - }, - autoDownload: false, - inspect: true, - userDataDir: '/usr/data/dir', - viewport: { width: 12, height: 12 }, - disableSandbox: false, - proxy: { enabled: false, server: undefined, bypass: undefined }, - maxScreenshotDimension: undefined, 
- }, - networkPolicy: { enabled: true, rules: [] }, - viewport: { width: 800, height: 600 }, - loadDelay: 2000, - zoom: 1, - maxAttempts: 1, - } as CaptureConfig; + const browserConfig = { + inspect: true, + userDataDir: '/usr/data/dir', + viewport: { width: 12, height: 12 }, + disableSandbox: false, + proxy: { enabled: false }, + } as BrowserConfig; const binaryPath = '/usr/local/share/common/secure/'; - const mockBrowserDriverFactory = await createDriverFactory(binaryPath, logger, captureConfig); + const captureConfig = { networkPolicy: {}, timeouts: {} } as CaptureConfig; + + const mockBrowserDriverFactory = await createDriverFactory( + binaryPath, + logger, + browserConfig, + captureConfig + ); + const mockPage = {} as Page; const mockBrowserDriver = new HeadlessChromiumDriver(mockPage, { inspect: true, diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts index be60b56dcc0c1..0250e6c0a9afd 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts @@ -4,13 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LayoutTypes } from '../export_types/common/constants'; import { createLayout } from '../export_types/common/layouts'; +import { LayoutTypes } from '../export_types/common/constants'; import { LayoutInstance } from '../export_types/common/layouts/layout'; -import { CaptureConfig } from '../server/types'; +import { ServerFacade } from '../types'; -export const createMockLayoutInstance = (captureConfig: CaptureConfig) => { - const mockLayout = createLayout(captureConfig, { +export const createMockLayoutInstance = (__LEGACY: ServerFacade) => { + const mockLayout = createLayout(__LEGACY, { id: LayoutTypes.PRESERVE_LAYOUT, dimensions: { height: 12, width: 12 }, }) as LayoutInstance; diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts index 332b37b58cb7d..2cd129d47b3f9 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts @@ -19,24 +19,16 @@ import { coreMock } from 'src/core/server/mocks'; import { ReportingPlugin, ReportingCore } from '../server'; import { ReportingSetupDeps, ReportingStartDeps } from '../server/types'; -const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => { - const configGetStub = jest.fn(); - return { - elasticsearch: setupMock.elasticsearch, - security: setupMock.security, - usageCollection: {} as any, - reporting: { - config: { - get: configGetStub, - kbnConfig: { get: configGetStub }, - }, - }, - __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, - }; -}; +export const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => ({ + elasticsearch: setupMock.elasticsearch, + security: setupMock.security, + usageCollection: {} as any, + __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, +}); export const createMockStartDeps = (startMock?: any): ReportingStartDeps => ({ data: startMock.data, + elasticsearch: startMock.elasticsearch, __LEGACY: {} as any, }); diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts 
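[Editorial note] For orientation, a hedged TypeScript sketch of the two config-access styles this revert swaps: the removed ReportingConfig service takes key segments, while the restored legacy server.config() takes one dotted key under the xpack.reporting prefix. The interfaces below are simplified stand-ins, not the real types from this patch; only the two get() call shapes are taken from the hunks above (get_reporting_usage.ts).

// Simplified stand-ins for illustration only; the real types live in the files patched above.
interface ReportingConfig {
  get(...keys: string[]): unknown;
  kbnConfig: { get(...keys: string[]): unknown };
}

interface LegacyServerFacade {
  config(): { get(key: string): unknown };
}

// New-platform style (removed by this revert): key segments.
function browserTypeFromService(config: ReportingConfig) {
  return config.get('capture', 'browser', 'type');
}

// Legacy style (restored by this revert): a single dotted key, prefixed with 'xpack.reporting.'.
function browserTypeFromServer(server: LegacyServerFacade) {
  return server.config().get('xpack.reporting.capture.browser.type');
}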
index 531e1dcaf84e0..bb7851ba036a9 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts @@ -3,10 +3,36 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ - +import { memoize } from 'lodash'; import { ServerFacade } from '../types'; -export const createMockServer = (): ServerFacade => { - const mockServer = {}; - return mockServer as any; +export const createMockServer = ({ settings = {} }: any): ServerFacade => { + const mockServer = { + config: memoize(() => ({ get: jest.fn() })), + info: { + protocol: 'http', + }, + plugins: { + elasticsearch: { + getCluster: memoize(() => { + return { + callWithRequest: jest.fn(), + }; + }), + }, + }, + }; + + const defaultSettings: any = { + 'xpack.reporting.encryptionKey': 'testencryptionkey', + 'server.basePath': '/sbp', + 'server.host': 'localhost', + 'server.port': 5601, + 'xpack.reporting.kibanaServer': {}, + }; + mockServer.config().get.mockImplementation((key: any) => { + return key in settings ? settings[key] : defaultSettings[key]; + }); + + return (mockServer as unknown) as ServerFacade; }; diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index 76253752be1b7..238079ba92a29 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -7,11 +7,14 @@ import { EventEmitter } from 'events'; import { ResponseObject } from 'hapi'; import { Legacy } from 'kibana'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { CallCluster } from '../../../../src/legacy/core_plugins/elasticsearch'; import { CancellationToken } from './common/cancellation_token'; -import { ReportingCore } from './server/core'; +import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; +import { BrowserType } from './server/browsers/types'; import { LevelLogger } from './server/lib/level_logger'; -import { LegacySetup } from './server/types'; +import { ReportingCore } from './server/core'; +import { LegacySetup, ReportingStartDeps, ReportingSetup, ReportingStart } from './server/types'; export type Job = EventEmitter & { id: string; @@ -22,8 +25,8 @@ export type Job = EventEmitter & { export interface NetworkPolicyRule { allow: boolean; - protocol?: string; - host?: string; + protocol: string; + host: string; } export interface NetworkPolicy { @@ -90,6 +93,51 @@ export type ReportingResponseToolkit = Legacy.ResponseToolkit; export type ESCallCluster = CallCluster; +/* + * Reporting Config + */ + +export interface CaptureConfig { + browser: { + type: BrowserType; + autoDownload: boolean; + chromium: BrowserConfig; + }; + maxAttempts: number; + networkPolicy: NetworkPolicy; + loadDelay: number; + timeouts: { + openUrl: number; + waitForElements: number; + renderComplet: number; + }; +} + +export interface BrowserConfig { + inspect: boolean; + userDataDir: string; + viewport: { width: number; height: number }; + disableSandbox: boolean; + proxy: { + enabled: boolean; + server: string; + bypass?: string[]; + }; +} + +export interface QueueConfig { + indexInterval: string; + pollEnabled: boolean; + pollInterval: number; + pollIntervalErrorMultiplier: number; + timeout: number; +} + +export interface ScrollConfig { + duration: string; + size: number; +} + export interface ElementPosition { boundingClientRect: { // modern browsers support 
x/y, but older ones don't @@ -226,10 +274,14 @@ export interface ESQueueInstance { export type CreateJobFactory = ( reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger -) => Promise; +) => CreateJobFnType; export type ExecuteJobFactory = ( reporting: ReportingCore, + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger ) => Promise; diff --git a/x-pack/plugins/reporting/config.ts b/x-pack/plugins/reporting/config.ts new file mode 100644 index 0000000000000..f1d6b1a8f248f --- /dev/null +++ b/x-pack/plugins/reporting/config.ts @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export const reportingPollConfig = { + jobCompletionNotifier: { interval: 10000, intervalErrorMultiplier: 5 }, + jobsRefresh: { interval: 5000, intervalErrorMultiplier: 5 }, +}; diff --git a/x-pack/plugins/reporting/kibana.json b/x-pack/plugins/reporting/kibana.json index d330eb9b7872a..a7e2bd288f0b1 100644 --- a/x-pack/plugins/reporting/kibana.json +++ b/x-pack/plugins/reporting/kibana.json @@ -1,11 +1,7 @@ { - "configPath": [ "xpack", "reporting" ], "id": "reporting", "version": "8.0.0", "kibanaVersion": "kibana", - "optionalPlugins": [ - "usageCollection" - ], "requiredPlugins": [ "home", "management", @@ -15,6 +11,6 @@ "share", "kibanaLegacy" ], - "server": true, + "server": false, "ui": true } diff --git a/x-pack/plugins/reporting/server/config/index.test.ts b/x-pack/plugins/reporting/server/config/index.test.ts deleted file mode 100644 index 08fe2c5861311..0000000000000 --- a/x-pack/plugins/reporting/server/config/index.test.ts +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import * as Rx from 'rxjs'; -import { CoreSetup, Logger, PluginInitializerContext } from '../../../../../src/core/server'; -import { createConfig$ } from './'; - -interface KibanaServer { - host?: string; - port?: number; - protocol?: string; -} -interface ReportingKibanaServer { - hostname?: string; - port?: number; - protocol?: string; -} - -const makeMockInitContext = (config: { - encryptionKey?: string; - kibanaServer: ReportingKibanaServer; -}): PluginInitializerContext => - ({ - config: { create: () => Rx.of(config) }, - } as PluginInitializerContext); - -const makeMockCoreSetup = (serverInfo: KibanaServer): CoreSetup => - ({ http: { getServerInfo: () => serverInfo } } as any); - -describe('Reporting server createConfig$', () => { - let mockCoreSetup: CoreSetup; - let mockInitContext: PluginInitializerContext; - let mockLogger: Logger; - - beforeEach(() => { - mockCoreSetup = makeMockCoreSetup({ host: 'kibanaHost', port: 5601, protocol: 'http' }); - mockInitContext = makeMockInitContext({ - kibanaServer: {}, - }); - mockLogger = ({ warn: jest.fn() } as unknown) as Logger; - }); - - afterEach(() => { - jest.resetAllMocks(); - }); - - it('creates random encryption key and default config using host, protocol, and port from server info', async () => { - const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise(); - - expect(result.encryptionKey).toMatch(/\S{32,}/); - expect(result.kibanaServer).toMatchInlineSnapshot(` - Object { - "hostname": "kibanaHost", - "port": 5601, - "protocol": "http", - } - `); - expect((mockLogger.warn as any).mock.calls.length).toBe(1); - expect((mockLogger.warn as any).mock.calls[0]).toMatchObject([ - 'Generating a random key for xpack.reporting.encryptionKey. To prevent sessions from being invalidated on restart, please set xpack.reporting.encryptionKey in kibana.yml', - ]); - }); - - it('uses the encryption key', async () => { - mockInitContext = makeMockInitContext({ - encryptionKey: 'iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii', - kibanaServer: {}, - }); - const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise(); - - expect(result.encryptionKey).toMatch('iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii'); - expect((mockLogger.warn as any).mock.calls.length).toBe(0); - }); - - it('uses the encryption key, reporting kibanaServer settings to override server info', async () => { - mockInitContext = makeMockInitContext({ - encryptionKey: 'iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii', - kibanaServer: { - hostname: 'reportingHost', - port: 5677, - protocol: 'httpsa', - }, - }); - const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise(); - - expect(result).toMatchInlineSnapshot(` - Object { - "encryptionKey": "iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii", - "kibanaServer": Object { - "hostname": "reportingHost", - "port": 5677, - "protocol": "httpsa", - }, - } - `); - expect((mockLogger.warn as any).mock.calls.length).toBe(0); - }); - - it('show warning when kibanaServer.hostName === "0"', async () => { - mockInitContext = makeMockInitContext({ - encryptionKey: 'aaaaaaaaaaaaabbbbbbbbbbbbaaaaaaaaa', - kibanaServer: { hostname: '0' }, - }); - const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise(); - - expect(result.kibanaServer).toMatchInlineSnapshot(` - Object { - "hostname": "0.0.0.0", - "port": 5601, - "protocol": "http", - } - `); - expect((mockLogger.warn as any).mock.calls.length).toBe(1); - expect((mockLogger.warn as 
any).mock.calls[0]).toMatchObject([ - `Found 'server.host: \"0\" in Kibana configuration. This is incompatible with Reporting. To enable Reporting to work, 'xpack.reporting.kibanaServer.hostname: 0.0.0.0' is being automatically ` + - `to the configuration. You can change the setting to 'server.host: 0.0.0.0' or add 'xpack.reporting.kibanaServer.hostname: 0.0.0.0' in kibana.yml to prevent this message.`, - ]); - }); -}); diff --git a/x-pack/plugins/reporting/server/config/index.ts b/x-pack/plugins/reporting/server/config/index.ts deleted file mode 100644 index ac51b39ae23b4..0000000000000 --- a/x-pack/plugins/reporting/server/config/index.ts +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { i18n } from '@kbn/i18n/'; -import { TypeOf } from '@kbn/config-schema'; -import crypto from 'crypto'; -import { map } from 'rxjs/operators'; -import { PluginConfigDescriptor } from 'kibana/server'; -import { CoreSetup, Logger, PluginInitializerContext } from '../../../../../src/core/server'; -import { ConfigSchema, ConfigType } from './schema'; - -export function createConfig$(core: CoreSetup, context: PluginInitializerContext, logger: Logger) { - return context.config.create>().pipe( - map(config => { - // encryption key - let encryptionKey = config.encryptionKey; - if (encryptionKey === undefined) { - logger.warn( - i18n.translate('xpack.reporting.serverConfig.randomEncryptionKey', { - defaultMessage: - 'Generating a random key for xpack.reporting.encryptionKey. To prevent sessions from being invalidated on ' + - 'restart, please set xpack.reporting.encryptionKey in kibana.yml', - }) - ); - encryptionKey = crypto.randomBytes(16).toString('hex'); - } - - const { kibanaServer: reportingServer } = config; - const serverInfo = core.http.getServerInfo(); - - // kibanaServer.hostname, default to server.host, don't allow "0" - let kibanaServerHostname = reportingServer.hostname - ? reportingServer.hostname - : serverInfo.host; - if (kibanaServerHostname === '0') { - logger.warn( - i18n.translate('xpack.reporting.serverConfig.invalidServerHostname', { - defaultMessage: - `Found 'server.host: "0" in Kibana configuration. This is incompatible with Reporting. ` + - `To enable Reporting to work, '{configKey}: 0.0.0.0' is being automatically to the configuration. ` + - `You can change the setting to 'server.host: 0.0.0.0' or add '{configKey}: 0.0.0.0' in kibana.yml to prevent this message.`, - values: { configKey: 'xpack.reporting.kibanaServer.hostname' }, - }) - ); - kibanaServerHostname = '0.0.0.0'; - } - - // kibanaServer.port, default to server.port - const kibanaServerPort = reportingServer.port - ? reportingServer.port - : serverInfo.port; // prettier-ignore - - // kibanaServer.protocol, default to server.protocol - const kibanaServerProtocol = reportingServer.protocol - ? 
reportingServer.protocol
-        : serverInfo.protocol;
-
-      return {
-        ...config,
-        encryptionKey,
-        kibanaServer: {
-          hostname: kibanaServerHostname,
-          port: kibanaServerPort,
-          protocol: kibanaServerProtocol,
-        },
-      };
-    })
-  );
-}
-
-export const config: PluginConfigDescriptor<ConfigType> = {
-  schema: ConfigSchema,
-  deprecations: ({ unused }) => [
-    unused('capture.browser.chromium.maxScreenshotDimension'),
-    unused('capture.concurrency'),
-    unused('capture.settleTime'),
-    unused('capture.timeout'),
-    unused('kibanaApp'),
-  ],
-};
-
-export { ConfigSchema, ConfigType };
diff --git a/x-pack/plugins/reporting/server/config/schema.test.ts b/x-pack/plugins/reporting/server/config/schema.test.ts
deleted file mode 100644
index d8fe6d1ff084a..0000000000000
--- a/x-pack/plugins/reporting/server/config/schema.test.ts
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { ConfigSchema } from './schema';
-
-describe('Reporting Config Schema', () => {
-  it(`context {"dev":false,"dist":false} produces correct config`, () => {
-    expect(ConfigSchema.validate({}, { dev: false, dist: false })).toMatchObject({
-      capture: {
-        browser: {
-          autoDownload: true,
-          chromium: { disableSandbox: false, proxy: { enabled: false } },
-          type: 'chromium',
-        },
-        loadDelay: 3000,
-        maxAttempts: 1,
-        networkPolicy: {
-          enabled: true,
-          rules: [
-            { allow: true, host: undefined, protocol: 'http:' },
-            { allow: true, host: undefined, protocol: 'https:' },
-            { allow: true, host: undefined, protocol: 'ws:' },
-            { allow: true, host: undefined, protocol: 'wss:' },
-            { allow: true, host: undefined, protocol: 'data:' },
-            { allow: false, host: undefined, protocol: undefined },
-          ],
-        },
-        viewport: { height: 1200, width: 1950 },
-        zoom: 2,
-      },
-      csv: {
-        checkForFormulas: true,
-        enablePanelActionDownload: true,
-        maxSizeBytes: 10485760,
-        scroll: { duration: '30s', size: 500 },
-      },
-      encryptionKey: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
-      index: '.reporting',
-      kibanaServer: {},
-      poll: {
-        jobCompletionNotifier: { interval: 10000, intervalErrorMultiplier: 5 },
-        jobsRefresh: { interval: 5000, intervalErrorMultiplier: 5 },
-      },
-      queue: {
-        indexInterval: 'week',
-        pollEnabled: true,
-        pollInterval: 3000,
-        pollIntervalErrorMultiplier: 10,
-        timeout: 120000,
-      },
-      roles: { allow: ['reporting_user'] },
-    });
-  });
-  it(`context {"dev":false,"dist":true} produces correct config`, () => {
-    expect(ConfigSchema.validate({}, { dev: false, dist: true })).toMatchObject({
-      capture: {
-        browser: {
-          autoDownload: false,
-          chromium: { disableSandbox: false, inspect: false, proxy: { enabled: false } },
-          type: 'chromium',
-        },
-        loadDelay: 3000,
-        maxAttempts: 3,
-        networkPolicy: {
-          enabled: true,
-          rules: [
-            { allow: true, host: undefined, protocol: 'http:' },
-            { allow: true, host: undefined, protocol: 'https:' },
-            { allow: true, host: undefined, protocol: 'ws:' },
-            { allow: true, host: undefined, protocol: 'wss:' },
-            { allow: true, host: undefined, protocol: 'data:' },
-            { allow: false, host: undefined, protocol: undefined },
-          ],
-        },
-        viewport: { height: 1200, width: 1950 },
-        zoom: 2,
-      },
-      csv: {
-        checkForFormulas: true,
-        enablePanelActionDownload: true,
-        maxSizeBytes: 10485760,
-        scroll: { duration: '30s', size: 500 },
-      },
-      index: '.reporting',
-      kibanaServer: {},
-      poll: {
-        jobCompletionNotifier: { interval: 10000, intervalErrorMultiplier: 5 },
-        jobsRefresh: { interval: 5000, intervalErrorMultiplier: 5 },
-      },
-      queue: {
-        indexInterval: 'week',
-        pollEnabled: true,
-        pollInterval: 3000,
-        pollIntervalErrorMultiplier: 10,
-        timeout: 120000,
-      },
-      roles: { allow: ['reporting_user'] },
    });
-  });
-});
diff --git a/x-pack/plugins/reporting/server/config/schema.ts b/x-pack/plugins/reporting/server/config/schema.ts
deleted file mode 100644
index 0058b7a5096f0..0000000000000
--- a/x-pack/plugins/reporting/server/config/schema.ts
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { schema, TypeOf } from '@kbn/config-schema';
-import moment from 'moment';
-
-const KibanaServerSchema = schema.object({
-  hostname: schema.maybe(
-    schema.string({
-      validate(value) {
-        if (value === '0') {
-          return 'must not be "0" for the headless browser to correctly resolve the host';
-        }
-      },
-      hostname: true,
-    })
-  ),
-  port: schema.maybe(schema.number()),
-  protocol: schema.maybe(
-    schema.string({
-      validate(value) {
-        if (!/^https?$/.test(value)) {
-          return 'must be "http" or "https"';
-        }
-      },
-    })
-  ),
-});
-
-const QueueSchema = schema.object({
-  indexInterval: schema.string({ defaultValue: 'week' }),
-  pollEnabled: schema.boolean({ defaultValue: true }),
-  pollInterval: schema.number({ defaultValue: 3000 }),
-  pollIntervalErrorMultiplier: schema.number({ defaultValue: 10 }),
-  timeout: schema.number({ defaultValue: moment.duration(2, 'm').asMilliseconds() }),
-});
-
-const RulesSchema = schema.object({
-  allow: schema.boolean(),
-  host: schema.maybe(schema.string()),
-  protocol: schema.maybe(schema.string()),
-});
-
-const CaptureSchema = schema.object({
-  timeouts: schema.object({
-    openUrl: schema.number({ defaultValue: 30000 }),
-    waitForElements: schema.number({ defaultValue: 30000 }),
-    renderComplete: schema.number({ defaultValue: 30000 }),
-  }),
-  networkPolicy: schema.object({
-    enabled: schema.boolean({ defaultValue: true }),
-    rules: schema.arrayOf(RulesSchema, {
-      defaultValue: [
-        { host: undefined, allow: true, protocol: 'http:' },
-        { host: undefined, allow: true, protocol: 'https:' },
-        { host: undefined, allow: true, protocol: 'ws:' },
-        { host: undefined, allow: true, protocol: 'wss:' },
-        { host: undefined, allow: true, protocol: 'data:' },
-        { host: undefined, allow: false, protocol: undefined }, // Default action is to deny!
-      ],
-    }),
-  }),
-  zoom: schema.number({ defaultValue: 2 }),
-  viewport: schema.object({
-    width: schema.number({ defaultValue: 1950 }),
-    height: schema.number({ defaultValue: 1200 }),
-  }),
-  loadDelay: schema.number({
-    defaultValue: moment.duration(3, 's').asMilliseconds(),
-  }), // TODO: use schema.duration
-  browser: schema.object({
-    autoDownload: schema.conditional(
-      schema.contextRef('dist'),
-      true,
-      schema.boolean({ defaultValue: false }),
-      schema.boolean({ defaultValue: true })
-    ),
-    chromium: schema.object({
-      inspect: schema.conditional(
-        schema.contextRef('dist'),
-        true,
-        schema.boolean({ defaultValue: false }),
-        schema.maybe(schema.never())
-      ),
-      disableSandbox: schema.boolean({ defaultValue: false }),
-      proxy: schema.object({
-        enabled: schema.boolean({ defaultValue: false }),
-        server: schema.conditional(
-          schema.siblingRef('enabled'),
-          true,
-          schema.uri({ scheme: ['http', 'https'] }),
-          schema.maybe(schema.never())
-        ),
-        bypass: schema.conditional(
-          schema.siblingRef('enabled'),
-          true,
-          schema.arrayOf(schema.string({ hostname: true })),
-          schema.maybe(schema.never())
-        ),
-      }),
-      userDataDir: schema.maybe(schema.string()), // FIXME unused?
-    }),
-    type: schema.string({ defaultValue: 'chromium' }),
-  }),
-  maxAttempts: schema.conditional(
-    schema.contextRef('dist'),
-    true,
-    schema.number({ defaultValue: 3 }),
-    schema.number({ defaultValue: 1 })
-  ),
-});
-
-const CsvSchema = schema.object({
-  checkForFormulas: schema.boolean({ defaultValue: true }),
-  enablePanelActionDownload: schema.boolean({ defaultValue: true }),
-  maxSizeBytes: schema.number({
-    defaultValue: 1024 * 1024 * 10, // 10MB
-  }), // TODO: use schema.byteSize
-  scroll: schema.object({
-    duration: schema.string({
-      defaultValue: '30s',
-      validate(value) {
-        if (!/^[0-9]+(d|h|m|s|ms|micros|nanos)$/.test(value)) {
-          return 'must be a duration string';
-        }
-      },
-    }),
-    size: schema.number({ defaultValue: 500 }),
-  }),
-});
-
-const EncryptionKeySchema = schema.conditional(
-  schema.contextRef('dist'),
-  true,
-  schema.maybe(schema.string({ minLength: 32 })),
-  schema.string({ minLength: 32, defaultValue: 'a'.repeat(32) })
-);
-
-const RolesSchema = schema.object({
-  allow: schema.arrayOf(schema.string(), { defaultValue: ['reporting_user'] }),
-});
-
-const IndexSchema = schema.string({ defaultValue: '.reporting' });
-
-const PollSchema = schema.object({
-  jobCompletionNotifier: schema.object({
-    interval: schema.number({
-      defaultValue: moment.duration(10, 's').asMilliseconds(),
-    }), // TODO: use schema.duration
-    intervalErrorMultiplier: schema.number({ defaultValue: 5 }),
-  }),
-  jobsRefresh: schema.object({
-    interval: schema.number({
-      defaultValue: moment.duration(5, 's').asMilliseconds(),
-    }), // TODO: use schema.duration
-    intervalErrorMultiplier: schema.number({ defaultValue: 5 }),
-  }),
-});
-
-export const ConfigSchema = schema.object({
-  kibanaServer: KibanaServerSchema,
-  queue: QueueSchema,
-  capture: CaptureSchema,
-  csv: CsvSchema,
-  encryptionKey: EncryptionKeySchema,
-  roles: RolesSchema,
-  index: IndexSchema,
-  poll: PollSchema,
-});
-
-export type ConfigType = TypeOf<typeof ConfigSchema>;
diff --git a/x-pack/plugins/reporting/server/index.ts b/x-pack/plugins/reporting/server/index.ts
deleted file mode 100644
index 2b1844cf2e10e..0000000000000
--- a/x-pack/plugins/reporting/server/index.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { PluginInitializerContext } from 'src/core/server';
-import { ReportingPlugin } from './plugin';
-
-export { config, ConfigSchema } from './config';
-export { ConfigType, PluginsSetup } from './plugin';
-
-export const plugin = (initializerContext: PluginInitializerContext) =>
-  new ReportingPlugin(initializerContext);
diff --git a/x-pack/plugins/reporting/server/plugin.ts b/x-pack/plugins/reporting/server/plugin.ts
deleted file mode 100644
index 53d821cffbb1f..0000000000000
--- a/x-pack/plugins/reporting/server/plugin.ts
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { Observable } from 'rxjs';
-import { first } from 'rxjs/operators';
-import { CoreSetup, Logger, Plugin, PluginInitializerContext } from '../../../../src/core/server';
-import { ConfigType, createConfig$ } from './config';
-
-export interface PluginsSetup {
-  /** @deprecated */
-  __legacy: {
-    config$: Observable<ConfigType>;
-  };
-}
-
-export class ReportingPlugin implements Plugin<PluginsSetup> {
-  private readonly log: Logger;
-
-  constructor(private readonly initializerContext: PluginInitializerContext) {
-    this.log = this.initializerContext.logger.get();
-  }
-
-  public async setup(core: CoreSetup): Promise<PluginsSetup> {
-    return {
-      __legacy: {
-        config$: createConfig$(core, this.initializerContext, this.log).pipe(first()),
-      },
-    };
-  }
-
-  public start() {}
-  public stop() {}
-}
-
-export { ConfigType };