BedrockChat & GeminiChat (elastic#186809)
## Summary

Adopted `BedrockChat` from the `@langchain/community` package, which adds
support for tool calling:
https://js.langchain.com/v0.2/docs/integrations/chat/bedrock/

Adopted `ChatGoogleGenerativeAI` from the `@langchain/google-genai` package,
which adds support for tool calling:
https://js.langchain.com/v0.2/docs/integrations/chat/google_generativeai
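
For illustration only, here is a rough sketch of the upstream tool-calling interface these two models expose, assuming the standard LangChain `bindTools` API described in the linked docs. The `get_alert_count` tool, the Gemini model name, and the environment-variable credentials are hypothetical placeholders; inside Kibana the calls go through the `ActionsClient*` wrappers added below rather than direct credentials.

```ts
import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
import { DynamicStructuredTool } from '@langchain/core/tools';
import { z } from 'zod';

// Hypothetical tool, defined only to illustrate binding tools to the models.
const getAlertCount = new DynamicStructuredTool({
  name: 'get_alert_count',
  description: 'Returns the number of open alerts for a given severity',
  schema: z.object({
    severity: z.string().describe('Alert severity to count, e.g. "critical"'),
  }),
  func: async ({ severity }) => `There are 42 open ${severity} alerts.`,
});

async function demo() {
  // BedrockChat (web entrypoint) with the tool bound to it. The model id is
  // the default used elsewhere in this PR; the credentials are placeholders.
  const bedrockWithTools = new BedrockChat({
    model: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
    region: 'us-east-1',
    credentials: {
      accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
    },
  }).bindTools([getAlertCount]);

  // ChatGoogleGenerativeAI with the same tool bound to it.
  const geminiWithTools = new ChatGoogleGenerativeAI({
    model: 'gemini-1.5-pro', // assumed model name
    apiKey: process.env.GOOGLE_API_KEY,
  }).bindTools([getAlertCount]);

  // Both models can now emit structured tool calls in their responses.
  const bedrockReply = await bedrockWithTools.invoke('How many critical alerts are open?');
  console.log(bedrockReply.tool_calls);

  const geminiReply = await geminiWithTools.invoke('How many critical alerts are open?');
  console.log(geminiReply.tool_calls);
}

void demo();
```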

Hidden behind the feature flag:
`--xpack.securitySolution.enableExperimental=[assistantBedrockChat]`

As of this PR, `integration_assistant` still uses
`ActionsClientSimpleChatModel`. Once the feature flag is enabled by default,
we will switch `integration_assistant` to the new chat models.

Thank you @stephmilovic a ton 🙇

---------

Co-authored-by: kibanamachine <[email protected]>
Co-authored-by: Steph Milovic <[email protected]>
Co-authored-by: Garrett Spong <[email protected]>
4 people authored Jul 23, 2024
1 parent e12e449 commit 26dd61e
Showing 40 changed files with 2,012 additions and 212 deletions.
27 changes: 17 additions & 10 deletions package.json
@@ -80,14 +80,15 @@
"resolutions": {
"**/@bazel/typescript/protobufjs": "6.11.4",
"**/@hello-pangea/dnd": "16.6.0",
"**/@langchain/core": "0.2.3",
"**/@langchain/core": "^0.2.17",
"**/@types/node": "20.10.5",
"**/@typescript-eslint/utils": "5.62.0",
"**/chokidar": "^3.5.3",
"**/d3-scale/**/d3-color": "npm:@elastic/[email protected]",
"**/globule/minimatch": "^3.1.2",
"**/hoist-non-react-statics": "^3.3.2",
"**/isomorphic-fetch/node-fetch": "^2.6.7",
"**/langchain": "^0.2.10",
"**/react-intl/**/@types/react": "^17.0.45",
"**/remark-parse/trim": "1.0.1",
"**/sharp": "0.32.6",
@@ -96,6 +97,8 @@
},
"dependencies": {
"@appland/sql-parser": "^1.5.1",
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-crypto/util": "^5.2.0",
"@babel/runtime": "^7.24.7",
"@cfworker/json-schema": "^1.12.7",
"@dnd-kit/core": "^6.1.0",
@@ -132,6 +135,7 @@
"@formatjs/intl-relativetimeformat": "^11.2.12",
"@formatjs/intl-utils": "^3.8.4",
"@formatjs/ts-transformer": "^3.13.14",
"@google/generative-ai": "^0.7.0",
"@grpc/grpc-js": "^1.8.22",
"@hapi/accept": "^5.0.2",
"@hapi/boom": "^9.1.4",
@@ -940,10 +944,11 @@
"@kbn/xstate-utils": "link:packages/kbn-xstate-utils",
"@kbn/zod": "link:packages/kbn-zod",
"@kbn/zod-helpers": "link:packages/kbn-zod-helpers",
"@langchain/community": "^0.2.4",
"@langchain/core": "0.2.3",
"@langchain/langgraph": "^0.0.23",
"@langchain/openai": "^0.0.34",
"@langchain/community": "0.2.18",
"@langchain/core": "^0.2.17",
"@langchain/google-genai": "^0.0.23",
"@langchain/langgraph": "^0.0.29",
"@langchain/openai": "^0.1.3",
"@langtrase/trace-attributes": "^3.0.8",
"@launchdarkly/node-server-sdk": "^9.4.7",
"@loaders.gl/core": "^3.4.7",
@@ -966,9 +971,11 @@
"@paralleldrive/cuid2": "^2.2.2",
"@reduxjs/toolkit": "1.9.7",
"@slack/webhook": "^7.0.1",
"@smithy/eventstream-codec": "^3.0.0",
"@smithy/eventstream-serde-node": "^3.0.0",
"@smithy/types": "^3.0.0",
"@smithy/eventstream-codec": "^3.1.1",
"@smithy/eventstream-serde-node": "^3.0.3",
"@smithy/protocol-http": "^4.0.2",
"@smithy/signature-v4": "^3.1.1",
"@smithy/types": "^3.2.0",
"@smithy/util-utf8": "^3.0.0",
"@tanstack/react-query": "^4.29.12",
"@tanstack/react-query-devtools": "^4.29.12",
@@ -1082,8 +1089,8 @@
"jsonwebtoken": "^9.0.2",
"jsts": "^1.6.2",
"kea": "^2.6.0",
"langchain": "0.2.3",
"langsmith": "^0.1.30",
"langchain": "^0.2.10",
"langsmith": "^0.1.37",
"launchdarkly-js-client-sdk": "^3.4.0",
"launchdarkly-node-server-sdk": "^7.0.3",
"load-json-file": "^6.2.0",
@@ -21,4 +21,5 @@ export type AssistantFeatureKey = keyof AssistantFeatures;
export const defaultAssistantFeatures = Object.freeze({
assistantKnowledgeBaseByDefault: false,
assistantModelEvaluation: false,
assistantBedrockChat: false,
});
@@ -18,6 +18,7 @@ import { z } from 'zod';

export type GetCapabilitiesResponse = z.infer<typeof GetCapabilitiesResponse>;
export const GetCapabilitiesResponse = z.object({
assistantBedrockChat: z.boolean(),
assistantKnowledgeBaseByDefault: z.boolean(),
assistantModelEvaluation: z.boolean(),
});
@@ -19,11 +19,14 @@ paths:
schema:
type: object
properties:
assistantBedrockChat:
type: boolean
assistantKnowledgeBaseByDefault:
type: boolean
assistantModelEvaluation:
type: boolean
required:
- assistantBedrockChat
- assistantKnowledgeBaseByDefault
- assistantModelEvaluation
'400':
4 changes: 4 additions & 0 deletions x-pack/packages/kbn-langchain/server/index.ts
@@ -5,9 +5,11 @@
* 2.0.
*/

import { ActionsClientBedrockChatModel } from './language_models/bedrock_chat';
import { ActionsClientChatOpenAI } from './language_models/chat_openai';
import { ActionsClientLlm } from './language_models/llm';
import { ActionsClientSimpleChatModel } from './language_models/simple_chat_model';
import { ActionsClientGeminiChatModel } from './language_models/gemini_chat';
import { parseBedrockStream } from './utils/bedrock';
import { parseGeminiResponse } from './utils/gemini';
import { getDefaultArguments } from './language_models/constants';
@@ -16,7 +18,9 @@ export {
parseBedrockStream,
parseGeminiResponse,
getDefaultArguments,
ActionsClientBedrockChatModel,
ActionsClientChatOpenAI,
ActionsClientGeminiChatModel,
ActionsClientLlm,
ActionsClientSimpleChatModel,
};
@@ -0,0 +1,84 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { BedrockChat as _BedrockChat } from '@langchain/community/chat_models/bedrock/web';
import type { ActionsClient } from '@kbn/actions-plugin/server';
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models';
import { Logger } from '@kbn/logging';
import { Readable } from 'stream';
import { PublicMethodsOf } from '@kbn/utility-types';

export const DEFAULT_BEDROCK_MODEL = 'anthropic.claude-3-5-sonnet-20240620-v1:0';
export const DEFAULT_BEDROCK_REGION = 'us-east-1';

export interface CustomChatModelInput extends BaseChatModelParams {
actionsClient: PublicMethodsOf<ActionsClient>;
connectorId: string;
logger: Logger;
temperature?: number;
signal?: AbortSignal;
model?: string;
maxTokens?: number;
}

export class ActionsClientBedrockChatModel extends _BedrockChat {
constructor({ actionsClient, connectorId, logger, ...params }: CustomChatModelInput) {
super({
...params,
credentials: { accessKeyId: '', secretAccessKey: '' },
// only needed to force BedrockChat to use messages api for Claude v2
model: params.model ?? DEFAULT_BEDROCK_MODEL,
region: DEFAULT_BEDROCK_REGION,
fetchFn: async (url, options) => {
const inputBody = JSON.parse(options?.body as string);

if (this.streaming && !inputBody.tools?.length) {
const data = (await actionsClient.execute({
actionId: connectorId,
params: {
subAction: 'invokeStream',
subActionParams: {
messages: inputBody.messages,
temperature: params.temperature ?? inputBody.temperature,
stopSequences: inputBody.stop_sequences,
system: inputBody.system,
maxTokens: params.maxTokens ?? inputBody.max_tokens,
tools: inputBody.tools,
anthropicVersion: inputBody.anthropic_version,
},
},
})) as { data: Readable };

return {
body: Readable.toWeb(data.data),
} as unknown as Response;
}

const data = (await actionsClient.execute({
actionId: connectorId,
params: {
subAction: 'invokeAIRaw',
subActionParams: {
messages: inputBody.messages,
temperature: params.temperature ?? inputBody.temperature,
stopSequences: inputBody.stop_sequences,
system: inputBody.system,
maxTokens: params.maxTokens ?? inputBody.max_tokens,
tools: inputBody.tools,
anthropicVersion: inputBody.anthropic_version,
},
},
})) as { status: string; data: { message: string } };

return {
ok: data.status === 'ok',
json: () => data.data,
} as unknown as Response;
},
});
}
}
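
For context (not part of this commit), a minimal sketch of how `ActionsClientBedrockChatModel` might be constructed, assuming `actionsClient`, `connectorId`, and `logger` come from the surrounding Kibana plugin context and that the package is consumed via `@kbn/langchain/server`:

```ts
import type { ActionsClient } from '@kbn/actions-plugin/server';
import type { Logger } from '@kbn/logging';
import type { PublicMethodsOf } from '@kbn/utility-types';
import { ActionsClientBedrockChatModel } from '@kbn/langchain/server';

// Provided by the surrounding Kibana request/plugin context; declared here
// only so the sketch type-checks.
declare const actionsClient: PublicMethodsOf<ActionsClient>;
declare const connectorId: string;
declare const logger: Logger;

async function askBedrock(question: string) {
  const chatModel = new ActionsClientBedrockChatModel({
    actionsClient,
    connectorId,
    logger,
    temperature: 0,
    maxTokens: 1024,
    // model defaults to DEFAULT_BEDROCK_MODEL; region is fixed to DEFAULT_BEDROCK_REGION
  });

  // The custom fetchFn above routes each request through the Kibana Bedrock
  // connector (invokeAIRaw / invokeStream) instead of calling AWS directly.
  return chatModel.invoke(question);
}
```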