Skip to content

Commit

Permalink
[8.x] [Obs AI Assistant] Fix alerts function (#203695) (#203825)
Browse files Browse the repository at this point in the history
# Backport

This will backport the following commits from `main` to `8.x`:
- [[Obs AI Assistant] Fix alerts function
(#203695)](#203695)

<!--- Backport version: 9.4.3 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Viduni
Wickramarachchi","email":"[email protected]"},"sourceCommit":{"committedDate":"2024-12-11T15:25:36Z","message":"[Obs
AI Assistant] Fix alerts function (#203695)\n\n## Summary\r\n\r\n###
Problem\r\nWith the merge of the PR
https://github.com/elastic/kibana/pull/183756,\r\nthe alerts function
has stopped working in the Obs AI Assistant, because\r\nthere has been a
change to the query (when finding alerts)\r\n\r\n### Solution\r\nRevert
the change made to the query.\r\n\r\n### Checklist\r\n\r\n- [x] The PR
description includes the appropriate Release Notes section,\r\nand the
correct `release_note:*` label is applied per
the\r\n[guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)","sha":"d9c1cd30af82abf12b5d569cbaa8886fbb78a5bb","branchLabelMapping":{"^v9.0.0$":"main","^v8.18.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:fix","v9.0.0","Team:Obs
AI
Assistant","ci:project-deploy-observability","backport:version","v8.18.0"],"title":"[Obs
AI Assistant] Fix alerts
function","number":203695,"url":"https://github.com/elastic/kibana/pull/203695","mergeCommit":{"message":"[Obs
AI Assistant] Fix alerts function (#203695)\n\n## Summary\r\n\r\n###
Problem\r\nWith the merge of the PR
https://github.com/elastic/kibana/pull/183756,\r\nthe alerts function
has stopped working in the Obs AI Assistant, because\r\nthere has been a
change to the query (when finding alerts)\r\n\r\n### Solution\r\nRevert
the change made to the query.\r\n\r\n### Checklist\r\n\r\n- [x] The PR
description includes the appropriate Release Notes section,\r\nand the
correct `release_note:*` label is applied per
the\r\n[guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)","sha":"d9c1cd30af82abf12b5d569cbaa8886fbb78a5bb"}},"sourceBranch":"main","suggestedTargetBranches":["8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/203695","number":203695,"mergeCommit":{"message":"[Obs
AI Assistant] Fix alerts function (#203695)\n\n## Summary\r\n\r\n###
Problem\r\nWith the merge of the PR
https://github.com/elastic/kibana/pull/183756,\r\nthe alerts function
has stopped working in the Obs AI Assistant, because\r\nthere has been a
change to the query (when finding alerts)\r\n\r\n### Solution\r\nRevert
the change made to the query.\r\n\r\n### Checklist\r\n\r\n- [x] The PR
description includes the appropriate Release Notes section,\r\nand the
correct `release_note:*` label is applied per
the\r\n[guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)","sha":"d9c1cd30af82abf12b5d569cbaa8886fbb78a5bb"}},{"branch":"8.x","label":"v8.18.0","branchLabelMappingKey":"^v8.18.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->

Co-authored-by: Viduni Wickramarachchi <[email protected]>
  • Loading branch information
kibanamachine and viduni94 authored Dec 11, 2024
1 parent 4eb0b23 commit 726efd3
Show file tree
Hide file tree
Showing 3 changed files with 84 additions and 61 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -196,17 +196,17 @@ export function registerAlertsFunction({
lte: end,
},
},
...kqlQuery,
...(!includeRecovered
? [
{
term: {
[ALERT_STATUS]: ALERT_STATUS_ACTIVE,
},
},
]
: []),
},
...kqlQuery,
...(!includeRecovered
? [
{
term: {
[ALERT_STATUS]: ALERT_STATUS_ACTIVE,
},
},
]
: []),
],
},
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,57 +137,6 @@ export default function ApiTest({ getService }: FtrProviderContext) {
]);
});

// NOTE(review): test is skipped — presumably flaky or pending a fix; confirm the reason before re-enabling.
it.skip('returns a useful error if the request fails', async () => {
  // Intercept every 'conversation' request so the fake LLM's response is fully under our control.
  const interceptor = proxy.intercept('conversation', () => true);

  // Accumulates the streamed HTTP response body from the chat endpoint.
  const passThrough = new PassThrough();

  // Fire the chat request. Deliberately NOT awaited: the response is a stream,
  // consumed through passThrough while the interceptor drives the upstream side.
  supertest
    .post(CHAT_API_URL)
    .set('kbn-xsrf', 'foo')
    .send({
      name: 'my_api_call',
      messages,
      connectorId,
      functions: [],
      scopes: ['all'],
    })
    .expect(200)
    .pipe(passThrough);

  // Raw streamed chunks concatenated as UTF-8 text.
  let data: string = '';

  passThrough.on('data', (chunk) => {
    data += chunk.toString('utf-8');
  });

  // Block until the proxy has actually received the intercepted request.
  const simulator = await interceptor.waitForIntercept();

  // Simulate the upstream LLM failing with an HTTP 400 and an
  // OpenAI-style 'context_length_exceeded' error payload.
  await simulator.status(400);

  await simulator.rawWrite(
    JSON.stringify({
      error: {
        code: 'context_length_exceeded',
        message:
          "This model's maximum context length is 8192 tokens. However, your messages resulted in 11036 tokens. Please reduce the length of the messages.",
        param: 'messages',
        type: 'invalid_request_error',
      },
    })
  );

  await simulator.rawEnd();

  // Wait for the client-side stream to end before parsing the collected body.
  await new Promise<void>((resolve) => passThrough.on('end', () => resolve()));

  const response = JSON.parse(data.trim());

  // The assistant is expected to translate the upstream error into its own
  // user-facing token-limit message.
  expect(response.error.message).to.be(
    `Token limit reached. Token limit is 8192, but the current conversation has 11036 tokens.`
  );
});

describe('security roles and access privileges', () => {
it('should deny access for users without the ai_assistant privilege', async () => {
try {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { MessageRole, MessageAddEvent } from '@kbn/observability-ai-assistant-plugin/common';
import expect from '@kbn/expect';
import { LlmProxy, createLlmProxy } from '../../../common/create_llm_proxy';
import { FtrProviderContext } from '../../../common/ftr_provider_context';
import { getMessageAddedEvents, invokeChatCompleteWithFunctionRequest } from './helpers';
import {
createProxyActionConnector,
deleteActionConnector,
} from '../../../common/action_connectors';

/**
 * FTR test suite verifying that the Obs AI Assistant `alerts` function can be
 * invoked through the chat/complete API without producing an error payload.
 * Regression coverage for the alerts-query revert (#203695).
 */
export default function ApiTest({ getService }: FtrProviderContext) {
  const supertestClient = getService('supertest');
  const logger = getService('log');
  const apiClient = getService('observabilityAIAssistantAPIClient');

  describe('when calling the alerts function', () => {
    const start = 'now-100h';
    const end = 'now';

    let llmProxy: LlmProxy;
    let actionConnectorId: string;
    let messageAddedEvents: MessageAddEvent[];

    before(async () => {
      // Stand up a fake LLM and an action connector that points at it.
      llmProxy = await createLlmProxy(logger);
      actionConnectorId = await createProxyActionConnector({
        supertest: supertestClient,
        log: logger,
        port: llmProxy.getPort(),
      });

      // Every 'conversation' request gets a canned LLM reply.
      void llmProxy
        .intercept('conversation', () => true, 'Hello from LLM Proxy')
        .completeAfterIntercept();

      // Ask the assistant to execute the 'alerts' function over the time range.
      const responseBody = await invokeChatCompleteWithFunctionRequest({
        connectorId: actionConnectorId,
        observabilityAIAssistantAPIClient: apiClient,
        functionCall: {
          name: 'alerts',
          trigger: MessageRole.Assistant,
          arguments: JSON.stringify({ start, end }),
        },
      });

      await llmProxy.waitForAllInterceptorsSettled();

      messageAddedEvents = getMessageAddedEvents(responseBody);
    });

    after(async () => {
      llmProxy.close();
      await deleteActionConnector({
        supertest: supertestClient,
        connectorId: actionConnectorId,
        log: logger,
      });
    });

    // This test ensures that invoking the alerts function does not result in an error.
    it('should execute the function without any errors', async () => {
      const functionResponse = messageAddedEvents[0];
      expect(functionResponse.message.message.name).to.be('alerts');

      const parsed = JSON.parse(functionResponse.message.message.content!);

      expect(parsed).not.to.have.property('error');
      expect(parsed).to.have.property('total');
      expect(parsed).to.have.property('alerts');
      expect(parsed.alerts).to.be.an('array');
      expect(parsed.total).to.be(0);
      expect(parsed.alerts.length).to.be(0);
    });
  });
}

0 comments on commit 726efd3

Please sign in to comment.