Skip to content

Commit

Permalink
[8.x] [Epic] AI Insights + Assistant - Add "Other" option to the existing OpenAI Connector dropdown list (elastic#8936) (elastic#194831) (elastic#195688)
Browse files Browse the repository at this point in the history

# Backport

This will backport the following commits from `main` to `8.x`:
- [[Epic] AI Insights + Assistant - Add "Other" option to the
existing OpenAI Connector dropdown list (elastic#8936)
(elastic#194831)](elastic#194831)

<!--- Backport version: 9.4.3 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Ievgen
Sorokopud","email":"[email protected]"},"sourceCommit":{"committedDate":"2024-10-09T22:07:31Z","message":"[Epic]
AI Insights + Assistant - Add \"Other\" option to the existing OpenAI
Connector dropdown list (elastic#8936)
(elastic#194831)","sha":"83a701e837a7a84a86dcc8d359154f900f69676a","branchLabelMapping":{"^v9.0.0$":"main","^v8.16.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["v9.0.0","release_note:feature","Feature:Security
Assistant","Team:Security Generative
AI","v8.16.0","backport:version"],"title":"[Epic] AI Insights +
Assistant - Add \"Other\" option to the existing OpenAI Connector
dropdown list
(elastic#8936)","number":194831,"url":"https://github.com/elastic/kibana/pull/194831","mergeCommit":{"message":"[Epic]
AI Insights + Assistant - Add \"Other\" option to the existing OpenAI
Connector dropdown list (elastic#8936)
(elastic#194831)","sha":"83a701e837a7a84a86dcc8d359154f900f69676a"}},"sourceBranch":"main","suggestedTargetBranches":["8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/194831","number":194831,"mergeCommit":{"message":"[Epic]
AI Insights + Assistant - Add \"Other\" option to the existing OpenAI
Connector dropdown list (elastic#8936)
(elastic#194831)","sha":"83a701e837a7a84a86dcc8d359154f900f69676a"}},{"branch":"8.x","label":"v8.16.0","branchLabelMappingKey":"^v8.16.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->

Co-authored-by: Ievgen Sorokopud <[email protected]>
  • Loading branch information
kibanamachine and e40pud authored Oct 9, 2024
1 parent ce302a9 commit 0035e94
Show file tree
Hide file tree
Showing 37 changed files with 915 additions and 32 deletions.
1 change: 1 addition & 0 deletions oas_docs/output/kibana.serverless.staging.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22906,6 +22906,7 @@ components:
enum:
- OpenAI
- Azure OpenAI
- Other
type: string
Security_AI_Assistant_API_Reader:
additionalProperties: true
Expand Down
1 change: 1 addition & 0 deletions oas_docs/output/kibana.serverless.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22906,6 +22906,7 @@ components:
enum:
- OpenAI
- Azure OpenAI
- Other
type: string
Security_AI_Assistant_API_Reader:
additionalProperties: true
Expand Down
1 change: 1 addition & 0 deletions oas_docs/output/kibana.staging.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -30731,6 +30731,7 @@ components:
enum:
- OpenAI
- Azure OpenAI
- Other
type: string
Security_AI_Assistant_API_Reader:
additionalProperties: true
Expand Down
1 change: 1 addition & 0 deletions oas_docs/output/kibana.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -30731,6 +30731,7 @@ components:
enum:
- OpenAI
- Azure OpenAI
- Other
type: string
Security_AI_Assistant_API_Reader:
additionalProperties: true
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1194,6 +1194,7 @@ components:
enum:
- OpenAI
- Azure OpenAI
- Other
type: string
Reader:
additionalProperties: true
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1194,6 +1194,7 @@ components:
enum:
- OpenAI
- Azure OpenAI
- Other
type: string
Reader:
additionalProperties: true
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ export const Reader = z.object({}).catchall(z.unknown());
* Provider
*/
export type Provider = z.infer<typeof Provider>;
export const Provider = z.enum(['OpenAI', 'Azure OpenAI']);
export const Provider = z.enum(['OpenAI', 'Azure OpenAI', 'Other']);
export type ProviderEnum = typeof Provider.enum;
export const ProviderEnum = Provider.enum;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ components:
enum:
- OpenAI
- Azure OpenAI
- Other

MessageRole:
type: string
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import { PRECONFIGURED_CONNECTOR } from './translations';
enum OpenAiProviderType {
OpenAi = 'OpenAI',
AzureAi = 'Azure OpenAI',
Other = 'Other',
}

interface GenAiConfig {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1025,15 +1025,17 @@ describe('actions telemetry', () => {
'.d3security': 2,
'.gen-ai__Azure OpenAI': 3,
'.gen-ai__OpenAI': 1,
'.gen-ai__Other': 1,
};
const { countByType, countGenAiProviderTypes } = getCounts(aggs);
expect(countByType).toEqual({
__d3security: 2,
'__gen-ai': 4,
'__gen-ai': 5,
});
expect(countGenAiProviderTypes).toEqual({
'Azure OpenAI': 3,
OpenAI: 1,
Other: 1,
});
});
});
1 change: 1 addition & 0 deletions x-pack/plugins/actions/server/usage/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ export const byGenAiProviderTypeSchema: MakeSchemaFrom<ActionsUsage>['count_by_t
// Known providers:
['Azure OpenAI']: { type: 'long' },
['OpenAI']: { type: 'long' },
['Other']: { type: 'long' },
};

export const byServiceProviderTypeSchema: MakeSchemaFrom<ActionsUsage>['count_active_email_connectors_by_service_type'] =
Expand Down
12 changes: 12 additions & 0 deletions x-pack/plugins/elastic_assistant/server/routes/utils.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,5 +65,17 @@ describe('Utils', () => {
const isOpenModel = isOpenSourceModel(connector);
expect(isOpenModel).toEqual(true);
});

it('should return `true` when apiProvider of OpenAiProviderType.Other is specified', async () => {
const connector = {
actionTypeId: '.gen-ai',
config: {
apiUrl: OPENAI_CHAT_URL,
apiProvider: OpenAiProviderType.Other,
},
} as unknown as Connector;
const isOpenModel = isOpenSourceModel(connector);
expect(isOpenModel).toEqual(true);
});
});
});
26 changes: 16 additions & 10 deletions x-pack/plugins/elastic_assistant/server/routes/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -203,19 +203,25 @@ export const isOpenSourceModel = (connector?: Connector): boolean => {
}

const llmType = getLlmType(connector.actionTypeId);
const connectorApiUrl = connector.config?.apiUrl
? (connector.config.apiUrl as string)
: undefined;
const isOpenAiType = llmType === 'openai';

if (!isOpenAiType) {
return false;
}
const connectorApiProvider = connector.config?.apiProvider
? (connector.config?.apiProvider as OpenAiProviderType)
: undefined;
if (connectorApiProvider === OpenAiProviderType.Other) {
return true;
}

const isOpenAiType = llmType === 'openai';
const isOpenAI =
isOpenAiType &&
(!connectorApiUrl ||
connectorApiUrl === OPENAI_CHAT_URL ||
connectorApiProvider === OpenAiProviderType.AzureAi);
const connectorApiUrl = connector.config?.apiUrl
? (connector.config.apiUrl as string)
: undefined;

return isOpenAiType && !isOpenAI;
return (
!!connectorApiUrl &&
connectorApiUrl !== OPENAI_CHAT_URL &&
connectorApiProvider !== OpenAiProviderType.AzureAi
);
};
1 change: 1 addition & 0 deletions x-pack/plugins/search_playground/common/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ export enum APIRoutes {
export enum LLMs {
openai = 'openai',
openai_azure = 'openai_azure',
openai_other = 'openai_other',
bedrock = 'bedrock',
gemini = 'gemini',
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,10 @@ jest.mock('./use_load_connectors', () => ({
}));

const mockConnectors = [
{ id: 'connectorId1', title: 'OpenAI Connector', type: LLMs.openai },
{ id: 'connectorId2', title: 'OpenAI Azure Connector', type: LLMs.openai_azure },
{ id: 'connectorId2', title: 'Bedrock Connector', type: LLMs.bedrock },
{ id: 'connectorId1', name: 'OpenAI Connector', type: LLMs.openai },
{ id: 'connectorId2', name: 'OpenAI Azure Connector', type: LLMs.openai_azure },
{ id: 'connectorId2', name: 'Bedrock Connector', type: LLMs.bedrock },
{ id: 'connectorId3', name: 'OpenAI OSS Model Connector', type: LLMs.openai_other },
];
const mockUseLoadConnectors = (data: any) => {
(useLoadConnectors as jest.Mock).mockReturnValue({ data });
Expand All @@ -36,7 +37,7 @@ describe('useLLMsModels Hook', () => {
expect(result.current).toEqual([
{
connectorId: 'connectorId1',
connectorName: undefined,
connectorName: 'OpenAI Connector',
connectorType: LLMs.openai,
disabled: false,
icon: expect.any(Function),
Expand All @@ -48,7 +49,7 @@ describe('useLLMsModels Hook', () => {
},
{
connectorId: 'connectorId1',
connectorName: undefined,
connectorName: 'OpenAI Connector',
connectorType: LLMs.openai,
disabled: false,
icon: expect.any(Function),
Expand All @@ -60,7 +61,7 @@ describe('useLLMsModels Hook', () => {
},
{
connectorId: 'connectorId1',
connectorName: undefined,
connectorName: 'OpenAI Connector',
connectorType: LLMs.openai,
disabled: false,
icon: expect.any(Function),
Expand All @@ -72,19 +73,19 @@ describe('useLLMsModels Hook', () => {
},
{
connectorId: 'connectorId2',
connectorName: undefined,
connectorName: 'OpenAI Azure Connector',
connectorType: LLMs.openai_azure,
disabled: false,
icon: expect.any(Function),
id: 'connectorId2Azure OpenAI ',
name: 'Azure OpenAI ',
id: 'connectorId2OpenAI Azure Connector (Azure OpenAI)',
name: 'OpenAI Azure Connector (Azure OpenAI)',
showConnectorName: false,
value: undefined,
promptTokenLimit: undefined,
},
{
connectorId: 'connectorId2',
connectorName: undefined,
connectorName: 'Bedrock Connector',
connectorType: LLMs.bedrock,
disabled: false,
icon: expect.any(Function),
Expand All @@ -96,7 +97,7 @@ describe('useLLMsModels Hook', () => {
},
{
connectorId: 'connectorId2',
connectorName: undefined,
connectorName: 'Bedrock Connector',
connectorType: LLMs.bedrock,
disabled: false,
icon: expect.any(Function),
Expand All @@ -106,6 +107,18 @@ describe('useLLMsModels Hook', () => {
value: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
promptTokenLimit: 200000,
},
{
connectorId: 'connectorId3',
connectorName: 'OpenAI OSS Model Connector',
connectorType: LLMs.openai_other,
disabled: false,
icon: expect.any(Function),
id: 'connectorId3OpenAI OSS Model Connector (OpenAI Compatible Service)',
name: 'OpenAI OSS Model Connector (OpenAI Compatible Service)',
showConnectorName: false,
value: undefined,
promptTokenLimit: undefined,
},
]);
});

Expand Down
17 changes: 14 additions & 3 deletions x-pack/plugins/search_playground/public/hooks/use_llms_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,22 @@ const mapLlmToModels: Record<
},
[LLMs.openai_azure]: {
icon: OpenAILogo,
getModels: (connectorName, includeName) => [
getModels: (connectorName) => [
{
label: i18n.translate('xpack.searchPlayground.openAIAzureModel', {
defaultMessage: 'Azure OpenAI {name}',
values: { name: includeName ? `(${connectorName})` : '' },
defaultMessage: '{name} (Azure OpenAI)',
values: { name: connectorName },
}),
},
],
},
[LLMs.openai_other]: {
icon: OpenAILogo,
getModels: (connectorName) => [
{
label: i18n.translate('xpack.searchPlayground.otherOpenAIModel', {
defaultMessage: '{name} (OpenAI Compatible Service)',
values: { name: connectorName },
}),
},
],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,12 @@ describe('useLoadConnectors', () => {
actionTypeId: '.bedrock',
isMissingSecrets: false,
},
{
id: '5',
actionTypeId: '.gen-ai',
isMissingSecrets: false,
config: { apiProvider: OpenAiProviderType.Other },
},
];
mockedLoadConnectors.mockResolvedValue(connectors);

Expand Down Expand Up @@ -106,6 +112,16 @@ describe('useLoadConnectors', () => {
title: 'Bedrock',
type: 'bedrock',
},
{
actionTypeId: '.gen-ai',
config: {
apiProvider: 'Other',
},
id: '5',
isMissingSecrets: false,
title: 'OpenAI Other',
type: 'openai_other',
},
]);
});
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,20 @@ const connectorTypeToLLM: Array<{
type: LLMs.openai,
}),
},
{
actionId: OPENAI_CONNECTOR_ID,
actionProvider: OpenAiProviderType.Other,
match: (connector) =>
connector.actionTypeId === OPENAI_CONNECTOR_ID &&
(connector as OpenAIConnector)?.config?.apiProvider === OpenAiProviderType.Other,
transform: (connector) => ({
...connector,
title: i18n.translate('xpack.searchPlayground.openAIOtherConnectorTitle', {
defaultMessage: 'OpenAI Other',
}),
type: LLMs.openai_other,
}),
},
{
actionId: BEDROCK_CONNECTOR_ID,
match: (connector) => connector.actionTypeId === BEDROCK_CONNECTOR_ID,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -152,4 +152,41 @@ describe('getChatParams', () => {
)
).rejects.toThrow('Invalid connector id');
});

it('returns the correct chat model and uses the default model when not specified in the params', async () => {
mockActionsClient.get.mockResolvedValue({
id: '2',
actionTypeId: OPENAI_CONNECTOR_ID,
config: { defaultModel: 'local' },
});

const result = await getChatParams(
{
connectorId: '2',
prompt: 'How does it work?',
citations: false,
},
{ actions, request, logger }
);

expect(Prompt).toHaveBeenCalledWith('How does it work?', {
citations: false,
context: true,
type: 'openai',
});
expect(QuestionRewritePrompt).toHaveBeenCalledWith({
type: 'openai',
});
expect(ActionsClientChatOpenAI).toHaveBeenCalledWith({
logger: expect.anything(),
model: 'local',
connectorId: '2',
actionsClient: expect.anything(),
signal: expect.anything(),
traceId: 'test-uuid',
temperature: 0.2,
maxRetries: 0,
});
expect(result.chatPrompt).toContain('How does it work?');
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ export const getChatParams = async (
actionsClient,
logger,
connectorId,
model,
model: model || connector?.config?.defaultModel,
traceId: uuidv4(),
signal: abortSignal,
temperature: getDefaultArguments().temperature,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import { isEmpty } from 'lodash/fp';
enum OpenAiProviderType {
OpenAi = 'OpenAI',
AzureAi = 'Azure OpenAI',
Other = 'Other',
}

interface GenAiConfig {
Expand Down
1 change: 1 addition & 0 deletions x-pack/plugins/stack_connectors/common/openai/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ export enum SUB_ACTION {
export enum OpenAiProviderType {
OpenAi = 'OpenAI',
AzureAi = 'Azure OpenAI',
Other = 'Other',
}

export const DEFAULT_TIMEOUT_MS = 120000;
Expand Down
Loading

0 comments on commit 0035e94

Please sign in to comment.