[Dataset quality] Failure store support in synthtrace (#195726) (#195781)

# Backport

This will backport the following commits from `main` to `8.x`:

- [[Dataset quality] Failure store support in synthtrace (#195726)](#195726)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport)

---

[Dataset quality] Failure store support in synthtrace (#195726)

This PR enables the creation of scenarios that use the failure store in synthtrace.

#### How to test

1. Run the scenario: `node scripts/synthtrace failed_logs`
2. Go to the dev console:
   - To get the documents that were ingested: `GET logs-*-*/_search`. This is equivalent to `GET logs-*-*/_search?failure_store=exclude` and will only include the documents that were properly ingested.
   - To get the documents in the failure store: `GET logs-*-*/_search?failure_store=only`

Demo: https://github.com/user-attachments/assets/5013a0af-fdfc-453a-b70c-fb2c452ad4d8

Co-authored-by: Yngrid Coello <[email protected]>
1 parent: 1719921. Commit: ac5deb7. 8 changed files with 263 additions and 28 deletions.
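Note: besides the plain `node scripts/synthtrace failed_logs` run described above, the new scenario also parses a logsdb scenario option (see `parseLogsScenarioOpts` in the file below), so it can presumably also be exercised against a LogsDB index template with something like `node scripts/synthtrace failed_logs --scenarioOpts.logsdb=true` (the exact flag syntax is an assumption, not taken from this commit).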
packages/kbn-apm-synthtrace/src/scenarios/failed_logs.ts (new file: 195 additions, 0 deletions)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { LogDocument, log, generateShortId, generateLongId } from '@kbn/apm-synthtrace-client';
import { merge } from 'lodash';
import { Scenario } from '../cli/scenario';
import { IndexTemplateName } from '../lib/logs/custom_logsdb_index_templates';
import { withClient } from '../lib/utils/with_client';
import {
  getServiceName,
  getCluster,
  getCloudRegion,
  getCloudProvider,
  MORE_THAN_1024_CHARS,
} from './helpers/logs_mock_data';
import { parseLogsScenarioOpts } from './helpers/logs_scenario_opts_parser';
import { LogsIndex } from '../lib/logs/logs_synthtrace_es_client';

const processors = [
  {
    script: {
      tag: 'normalize log level',
      lang: 'painless',
      source: `
        String level = ctx['log.level'];
        if ('0'.equals(level)) {
          ctx['log.level'] = 'info';
        } else if ('1'.equals(level)) {
          ctx['log.level'] = 'debug';
        } else if ('2'.equals(level)) {
          ctx['log.level'] = 'warning';
        } else if ('3'.equals(level)) {
          ctx['log.level'] = 'error';
        } else {
          throw new Exception("Not a valid log level");
        }
      `,
    },
  },
];
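// Sketch of the pipeline's effect: a document ingested with 'log.level': '2'
// comes out with 'log.level': 'warning', while any value outside '0'..'3'
// throws a script_exception. Because the data stream template below enables
// the failure store, such failed documents are captured there instead of
// simply being rejected at bulk time.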

// Logs Data logic
const MESSAGE_LOG_LEVELS = [
  { message: 'A simple log', level: '0' },
  {
    message: 'Another log message',
    level: '1',
  },
  {
    message: 'A log message generated from a warning',
    level: '2',
  },
  { message: 'Error with certificate: "ca_trusted_fingerprint"', level: '3' },
];
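// These numeric levels line up with the pipeline branches above:
// '0' -> info, '1' -> debug, '2' -> warning, '3' -> error.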

const scenario: Scenario<LogDocument> = async (runOptions) => {
  const { isLogsDb } = parseLogsScenarioOpts(runOptions.scenarioOpts);
  return {
    bootstrap: async ({ logsEsClient }) => {
      await logsEsClient.createCustomPipeline(processors);
      if (isLogsDb) await logsEsClient.createIndexTemplate(IndexTemplateName.LogsDb);

      await logsEsClient.updateIndexTemplate(
        isLogsDb ? IndexTemplateName.LogsDb : LogsIndex,
        (template) => {
          const next = {
            name: LogsIndex,
            data_stream: {
              failure_store: true,
            },
          };

          return merge({}, template, next);
        }
      );
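      // After this merge the logs index template (assuming LogsIndex resolves to
      // its name) carries data_stream.failure_store: true, so data streams created
      // from it are provisioned with a failure store.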
    },
    generate: ({ range, clients: { logsEsClient } }) => {
      const { logger } = runOptions;

      const constructLogsCommonData = () => {
        const index = Math.floor(Math.random() * 3);
        const serviceName = getServiceName(index);
        const logMessage = MESSAGE_LOG_LEVELS[index];
        const { clusterId, clusterName } = getCluster(index);
        const cloudRegion = getCloudRegion(index);

        const commonLongEntryFields: LogDocument = {
          'trace.id': generateShortId(),
          'agent.name': 'synth-agent',
          'orchestrator.cluster.name': clusterName,
          'orchestrator.cluster.id': clusterId,
          'orchestrator.resource.id': generateShortId(),
          'cloud.provider': getCloudProvider(),
          'cloud.region': cloudRegion,
          'cloud.availability_zone': `${cloudRegion}a`,
          'cloud.project.id': generateShortId(),
          'cloud.instance.id': generateShortId(),
          'log.file.path': `/logs/${generateLongId()}/error.txt`,
        };

        return {
          index,
          serviceName,
          logMessage,
          cloudRegion,
          commonLongEntryFields,
        };
      };

      const datasetSynth1Logs = (timestamp: number) => {
        const {
          serviceName,
          logMessage: { level, message },
          commonLongEntryFields,
        } = constructLogsCommonData();

        return log
          .create({ isLogsDb })
          .dataset('synth.1')
          .message(message)
          .logLevel(level)
          .service(serviceName)
          .defaults(commonLongEntryFields)
          .timestamp(timestamp);
      };

      const datasetSynth2Logs = (i: number, timestamp: number) => {
        const {
          serviceName,
          logMessage: { level, message },
          commonLongEntryFields,
        } = constructLogsCommonData();
        const isFailed = i % 60 === 0;
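        // With 200 documents per minute, i % 60 === 0 marks 4 of them (i = 0, 60,
        // 120, 180) as failed: level '4' makes the ingest pipeline throw, routing
        // the document to the failure store.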
        return log
          .create({ isLogsDb })
          .dataset('synth.2')
          .message(message)
          .logLevel(isFailed ? '4' : level) // "script_exception": Not a valid log level
          .service(serviceName)
          .defaults(commonLongEntryFields)
          .timestamp(timestamp);
      };

      const datasetSynth3Logs = (i: number, timestamp: number) => {
        const {
          serviceName,
          logMessage: { level, message },
          cloudRegion,
          commonLongEntryFields,
        } = constructLogsCommonData();
        const isMalformed = i % 10 === 0;
        const isFailed = i % 80 === 0;
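        // synth.3 mixes two kinds of problems: every 10th document is degraded
        // (its 'cloud.availability_zone' exceeds ignore_above: 1024, so the value
        // is left unindexed and surfaces in _ignored), while every 80th fails
        // ingestion outright via an invalid level.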
        return log
          .create({ isLogsDb })
          .dataset('synth.3')
          .message(message)
          .logLevel(isFailed ? '5' : level) // "script_exception": Not a valid log level
          .service(serviceName)
          .defaults({
            ...commonLongEntryFields,
            'cloud.availability_zone': isMalformed
              ? MORE_THAN_1024_CHARS // "ignore_above": 1024 in mapping
              : `${cloudRegion}a`,
          })
          .timestamp(timestamp);
      };

      const logs = range
        .interval('1m')
        .rate(1)
        .generator((timestamp) => {
          return Array(200)
            .fill(0)
            .flatMap((_, index) => [
              datasetSynth1Logs(timestamp),
              datasetSynth2Logs(index, timestamp),
              datasetSynth3Logs(index, timestamp),
            ]);
        });
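      // One generator invocation per minute (rate 1), 600 documents per
      // invocation: 200 for each of synth.1, synth.2 and synth.3.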

      return withClient(
        logsEsClient,
        logger.perf('generating_logs', () => logs)
      );
    },
  };
};

export default scenario;
```
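To inspect a single dataset rather than all of `logs-*-*`, the `failure_store` search option from the PR description can be scoped down, assuming the usual `logs-<dataset>-default` data stream naming:

```
# Only the documents that failed ingestion for the synth.2 dataset
GET logs-synth.2-default/_search?failure_store=only

# All successfully ingested documents for synth.3 (degraded documents included,
# since ignore_above does not fail ingestion)
GET logs-synth.3-default/_search?failure_store=exclude
```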