Added e2e tests for failure store support
yngrdyn committed Nov 21, 2024
1 parent ad7d150 commit 5f1b389
Showing 8 changed files with 275 additions and 18 deletions.
@@ -10,7 +10,11 @@
import { Client, estypes } from '@elastic/elasticsearch';
import { pipeline, Readable } from 'stream';
import { LogDocument } from '@kbn/apm-synthtrace-client/src/lib/logs';
import { IngestProcessorContainer, MappingTypeMapping } from '@elastic/elasticsearch/lib/api/types';
import {
IndicesIndexSettings,
IngestProcessorContainer,
MappingTypeMapping,
} from '@elastic/elasticsearch/lib/api/types';
import { ValuesType } from 'utility-types';
import { SynthtraceEsClient, SynthtraceEsClientOptions } from '../shared/base_client';
import { getSerializeTransform } from '../shared/get_serialize_transform';
@@ -52,7 +56,11 @@ export class LogsSynthtraceEsClient extends SynthtraceEsClient<LogDocument> {
}
}

async createComponentTemplate(name: string, mappings: MappingTypeMapping) {
async createComponentTemplate(
name: string,
mappings?: MappingTypeMapping,
settings?: IndicesIndexSettings
) {
const isTemplateExisting = await this.client.cluster.existsComponentTemplate({ name });

if (isTemplateExisting) return this.logger.info(`Component template already exists: ${name}`);
@@ -61,7 +69,8 @@ export class LogsSynthtraceEsClient extends SynthtraceEsClient<LogDocument> {
await this.client.cluster.putComponentTemplate({
name,
template: {
mappings,
...((mappings && { mappings }) || {}),
...((settings && { settings }) || {}),
},
});
this.logger.info(`Component template successfully created: ${name}`);
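For reference, a brief usage sketch of the extended signature, mirroring how the e2e tests below wire a default pipeline through a `@custom` component template (`synthtrace` is an assumed `LogsSynthtraceEsClient` instance):

```ts
// Settings-only component template: no mappings, just a default ingest pipeline,
// as used by the tests to route logs-apache.access documents through a custom pipeline.
await synthtrace.createComponentTemplate('logs-apache.access@custom', undefined, {
  'index.default_pipeline': 'logs-apache.access@custom',
});
```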
@@ -124,16 +133,17 @@ export class LogsSynthtraceEsClient extends SynthtraceEsClient<LogDocument> {
}
}

async createCustomPipeline(processors: IngestProcessorContainer[]) {
async createCustomPipeline(processors: IngestProcessorContainer[], pipelineId?: string) {
const id = pipelineId ?? LogsCustom;
try {
await this.client.ingest.putPipeline({
id: LogsCustom,
id,
processors,
version: 1,
});
this.logger.info(`Custom pipeline created: ${LogsCustom}`);
this.logger.info(`Custom pipeline created: ${id}`);
} catch (err) {
this.logger.error(`Custom pipeline creation failed: ${LogsCustom} - ${err.message}`);
this.logger.error(`Custom pipeline creation failed: ${id} - ${err.message}`);
}
}

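A short usage sketch of the new optional `pipelineId` parameter (assuming a `LogsSynthtraceEsClient` instance named `synthtrace` and the `customLogLevelProcessor` array defined in the test data file later in this diff):

```ts
// Default pipeline id (LogsCustom):
await synthtrace.createCustomPipeline(customLogLevelProcessor);

// Explicit id, scoping the pipeline to a single integration dataset:
await synthtrace.createCustomPipeline(customLogLevelProcessor, 'logs-apache.access@custom');
```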
@@ -97,6 +97,7 @@ export default function DocumentTrends({ lastReloadTime }: { lastReloadTime: num
<EuiFlexGroup justifyContent="flexEnd" gutterSize="s">
<EuiFlexItem>
<EuiButtonGroup
data-test-subj="datasetQualityDetailsChartTypeButtonGroup"
legend={i18n.translate('xpack.datasetQuality.details.chartTypeLegend', {
defaultMessage: 'Quality chart type',
})}
65 changes: 63 additions & 2 deletions x-pack/test/functional/apps/dataset_quality/data/logs_data.ts
@@ -118,7 +118,7 @@ export function createLogRecord(
.create()
.dataset(dataset)
.message(msg.message)
.logLevel(isMalformed ? MORE_THAN_1024_CHARS : msg.level)
.logLevel(msg.level)
.service(serviceName)
.namespace(namespace)
.defaults({
@@ -163,7 +163,7 @@ export function createDegradedFieldsRecord({
.create()
.dataset(dataset)
.message(MESSAGE_LOG_LEVELS[0].message)
.logLevel(MORE_THAN_1024_CHARS)
.logLevel(MESSAGE_LOG_LEVELS[0].level)
.service(SERVICE_NAMES[0])
.namespace(defaultNamespace)
.defaults({
@@ -189,6 +189,67 @@
});
}

/*
The helper function generates Failed Docs for the given dataset.
*/
export function createFailedRecords({
to,
count = 1,
dataset,
namespace,
rate = 1, // rate of failed logs (min value 0, max value 1)
}: {
to: string;
count?: number;
dataset: string;
namespace?: string;
rate?: number;
}) {
return timerange(moment(to).subtract(count, 'minute'), moment(to))
.interval('1m')
.rate(1)
.generator((timestamp) => {
return Array(count)
.fill(0)
.flatMap((_, index) => {
// Marks every (count * rate)-th document in the batch as failed
// (e.g. count = 10, rate = 0.5 -> indices 0 and 5, i.e. 2 of 10 docs).
const isFailed = index % (count * rate) === 0;
return log
.create()
.dataset(dataset)
.message(MESSAGE_LOG_LEVELS[0].message)
.logLevel(isFailed ? 'anyLevel' : LogLevel.INFO)
.service(SERVICE_NAMES[0])
.namespace(namespace ?? defaultNamespace)
.defaults({
'trace.id': generateShortId(),
'agent.name': 'synth-agent',
})
.timestamp(timestamp);
});
});
}
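A usage sketch matching how the e2e tests below call this helper; `synthtrace` is an assumed `LogsSynthtraceEsClient` instance:

```ts
// With count = 10 and rate = 0.5, every (count * rate) = 5th document per interval
// fails the log-level processor, i.e. 2 of 10 docs, which surfaces as 20% failed docs.
const failedLogs = createFailedRecords({
  to: new Date().toISOString(),
  count: 10,
  dataset: 'synth.failed',
  rate: 0.5,
});

await synthtrace.index([failedLogs]);
```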

export const customLogLevelProcessor = [
{
script: {
tag: 'normalize log level',
lang: 'painless',
source: `
String level = ctx['log.level'];
if ('info'.equals(level)) {
ctx['log.level'] = 'info';
} else if ('debug'.equals(level)) {
ctx['log.level'] = 'debug';
} else if ('error'.equals(level)) {
ctx['log.level'] = 'error';
} else {
throw new Exception("Not a valid log level");
}
`,
},
},
];
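The processor above passes through the known levels unchanged and throws for anything else, which is what sends documents to the failure store once it is enabled. A minimal sketch, assuming an `@elastic/elasticsearch` `Client` pointed at a local test cluster and an illustrative import path, of checking that behaviour with the ingest simulate API:

```ts
import { Client } from '@elastic/elasticsearch';
// Path is illustrative; customLogLevelProcessor is the processor array defined above.
import { customLogLevelProcessor } from './data/logs_data';

// Assumed local test cluster.
const client = new Client({ node: 'http://localhost:9200' });

async function simulateCustomLogLevelProcessor() {
  const response = await client.ingest.simulate({
    pipeline: { processors: customLogLevelProcessor },
    docs: [
      { _source: { 'log.level': 'info', message: 'accepted unchanged' } },
      { _source: { 'log.level': 'anyLevel', message: 'rejected by the script' } },
    ],
  });

  // The second doc is expected to report the "Not a valid log level" error; with the
  // failure store enabled on the data stream, such docs show up as failed docs.
  console.log(JSON.stringify(response.docs, null, 2));
}
```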

export const datasetNames = ['synth.1', 'synth.2', 'synth.3'];
export const defaultNamespace = 'default';
export const productionNamespace = 'production';
@@ -6,9 +6,12 @@
*/

import expect from '@kbn/expect';
import merge from 'lodash/merge';
import { DatasetQualityFtrProviderContext } from './config';
import {
createDegradedFieldsRecord,
createFailedRecords,
customLogLevelProcessor,
datasetNames,
defaultNamespace,
getInitialTestLogs,
@@ -54,6 +57,8 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
const regularDataStreamName = `logs-${datasetNames[0]}-${defaultNamespace}`;
const degradedDatasetName = datasetNames[2];
const degradedDataStreamName = `logs-${degradedDatasetName}-${defaultNamespace}`;
const failedDatasetName = datasetNames[1];
const failedDataStreamName = `logs-${failedDatasetName}-${defaultNamespace}`;

describe('Dataset Quality Details', () => {
before(async () => {
@@ -63,6 +68,25 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
// Install Bitbucket Integration (a package which does not have Dashboards) and ingest logs for it
await PageObjects.observabilityLogsExplorer.installPackage(bitbucketPkg);

// Enable failure store for logs
await synthtrace.createCustomPipeline(customLogLevelProcessor, 'logs-apache.access@custom');
await synthtrace.createComponentTemplate('logs-apache.access@custom', undefined, {
'index.default_pipeline': 'logs-apache.access@custom',
});
await synthtrace.updateIndexTemplate(
'logs-apache.access',
(template: Record<string, any>): Record<string, any> => {
const next: Record<string, any> = {
name: 'logs-apache.access',
data_stream: {
failure_store: true,
},
};

return merge({}, template, next);
}
);

await synthtrace.index([
// Ingest basic logs
getInitialTestLogs({ to, count: 4 }),
@@ -87,6 +111,14 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
namespace: productionNamespace,
isMalformed: true,
}),
// Index failed docs for Apache integration
createFailedRecords({
to: new Date().toISOString(),
count: 10,
dataset: apacheAccessDatasetName,
namespace: productionNamespace,
rate: 0.5,
}),
// Index logs for Bitbucket integration
getLogsForDataset({ to, count: 10, dataset: bitbucketDatasetName }),
]);
@@ -160,6 +192,19 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
expect(currentUrl).to.not.contain('breakdownField');
});
});

it('reflects the selected quality issue chart state in url', async () => {
await PageObjects.datasetQuality.navigateToDetails({ dataStream: failedDataStreamName });

const chartType = 'failedDocs';
await PageObjects.datasetQuality.selectQualityIssuesChartType(chartType);

// Wait for URL to contain "qualityIssuesChart:failedDocs"
await retry.tryForTime(5000, async () => {
const currentUrl = await browser.getCurrentUrl();
expect(decodeURIComponent(currentUrl)).to.contain(`qualityIssuesChart:${chartType}`);
});
});
});

describe('overview summary panel', () => {
@@ -168,13 +213,14 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
dataStream: apacheAccessDataStreamName,
});

const { docsCountTotal, degradedDocs, services, hosts, size } =
const { docsCountTotal, degradedDocs, failedDocs, services, hosts, size } =
await PageObjects.datasetQuality.parseOverviewSummaryPanelKpis();
expect(parseInt(docsCountTotal, 10)).to.be(226);
expect(parseInt(docsCountTotal, 10)).to.be(306);
expect(parseInt(degradedDocs, 10)).to.be(1);
expect(parseInt(services, 10)).to.be(3);
expect(parseInt(hosts, 10)).to.be(52);
expect(parseInt(size, 10)).to.be.greaterThan(0);
expect(parseInt(failedDocs, 10)).to.be(20);
});
});

@@ -371,7 +417,7 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
const rows =
await PageObjects.datasetQuality.getDatasetQualityDetailsDegradedFieldTableRows();

expect(rows.length).to.eql(3);
expect(rows.length).to.eql(2);
});

it('should display Spark Plot for every row of degraded fields', async () => {
@@ -6,8 +6,11 @@
*/

import expect from '@kbn/expect';
import merge from 'lodash/merge';
import { DatasetQualityFtrProviderContext } from './config';
import {
createFailedRecords,
customLogLevelProcessor,
datasetNames,
defaultNamespace,
getInitialTestLogs,
@@ -26,13 +29,30 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
const to = '2024-01-01T12:00:00.000Z';
const apacheAccessDatasetName = 'apache.access';
const apacheAccessDatasetHumanName = 'Apache access logs';
const failedDatasetName = 'synth.failed';
const pkg = {
name: 'apache',
version: '1.14.0',
};

describe('Dataset quality table', () => {
before(async () => {
// Enable failure store for logs
await synthtrace.createCustomPipeline(customLogLevelProcessor);
await synthtrace.updateIndexTemplate(
'logs',
(template: Record<string, any>): Record<string, any> => {
const next: Record<string, any> = {
name: 'logs',
data_stream: {
failure_store: true,
},
};

return merge({}, template, next);
}
);

// Install Integration and ingest logs for it
await PageObjects.observabilityLogsExplorer.installPackage(pkg);
// Ingest basic logs
@@ -53,6 +73,13 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
dataset: apacheAccessDatasetName,
namespace: productionNamespace,
}),
// Ingest Failed Logs
createFailedRecords({
to: new Date().toISOString(),
count: 10,
dataset: failedDatasetName,
rate: 0.5,
}),
]);
await PageObjects.datasetQuality.navigateTo();
});
@@ -109,6 +136,14 @@ export default function ({ getService, getPageObjects }: DatasetQualityFtrProvid
expect(degradedDocsColCellTexts).to.eql(['0%', '0%', '0%', '100%']);
});

it('shows failed docs percentage', async () => {
const cols = await PageObjects.datasetQuality.parseDatasetTable();

const failedDocsCol = cols['Failed docs (%)'];
const failedDocsColCellTexts = await failedDocsCol.getCellTexts();
expect(failedDocsColCellTexts).to.eql(['0%', '0%', '0%', '0%', '20%']);
});

it('shows the value in the size column', async () => {
const cols = await PageObjects.datasetQuality.parseDatasetTable();

13 changes: 12 additions & 1 deletion x-pack/test/functional/page_objects/dataset_quality.ts
@@ -54,7 +54,7 @@ type SummaryPanelKpi = Record<
>;

type SummaryPanelKPI = Record<
'docsCountTotal' | 'size' | 'services' | 'hosts' | 'degradedDocs',
'docsCountTotal' | 'size' | 'services' | 'hosts' | 'degradedDocs' | 'failedDocs',
string
>;

@@ -70,6 +70,7 @@ const texts = {
services: 'Services',
hosts: 'Hosts',
degradedDocs: 'Degraded docs',
failedDocs: 'Failed docs',
};

export function DatasetQualityPageObject({ getPageObjects, getService }: FtrProviderContext) {
@@ -138,6 +139,7 @@ export function DatasetQualityPageObject({ getPageObjects, getService }: FtrProv
'datasetQualityDetailsDegradedFieldFlyoutIssueDoesNotExist',
datasetQualityDetailsOverviewDegradedFieldToggleSwitch:
'datasetQualityDetailsOverviewDegradedFieldToggleSwitch',
datasetQualityIssuesChartTypeButtonGroup: 'datasetQualityDetailsChartTypeButtonGroup',
};

return {
@@ -395,6 +397,7 @@ export function DatasetQualityPageObject({ getPageObjects, getService }: FtrProv
{ title: texts.services, key: 'services' },
{ title: texts.hosts, key: 'hosts' },
{ title: texts.degradedDocs, key: 'degradedDocs' },
{ title: texts.failedDocs, key: 'failedDocs' },
].filter((item) => !excludeKeys.includes(item.key));

const kpiTexts = await Promise.all(
@@ -415,6 +418,14 @@ export function DatasetQualityPageObject({ getPageObjects, getService }: FtrProv
);
},

async selectQualityIssuesChartType(chartType: 'degradedDocs' | 'failedDocs') {
const datasetDetailsContainer: WebElementWrapper = await testSubjects.find(
testSubjectSelectors.datasetQualityIssuesChartTypeButtonGroup
);
const chartTypeButton = await datasetDetailsContainer.findByTestSubject(chartType);
return chartTypeButton.click();
},

/**
* Selects a breakdown field from the unified histogram breakdown selector
* @param fieldText The text of the field to select. Use 'No breakdown' to clear the selection