From bc466ea738107b00a8dbba216630369ff729d729 Mon Sep 17 00:00:00 2001
From: Samiul Monir <150824886+Samiul-TheSoccerFan@users.noreply.github.com>
Date: Tue, 24 Dec 2024 22:23:25 -0500
Subject: [PATCH] [Search] Add Inference endpoint Flyout in Inference
Management UI (#203204)
## Summary
This PR:
- Creates a UI component package to share the AI connector form between
multiple plugins
- Integrates the packaged components into the `Search Inference Endpoint`
plugin (see the usage sketch below)
https://github.com/user-attachments/assets/2b447b44-3d1d-4422-b76d-8d8fd160b2bc
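For context, here is a minimal sketch (not part of the diff) of how a consuming plugin might wire the shared component into a form. It mirrors the pattern used in the package's own tests; the component and prop names below (`AddInferenceEndpointForm`, `Props`) are illustrative assumptions:

```tsx
import React from 'react';
import { Form, useForm } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
import {
  InferenceServiceFormFields,
  type InferenceProvider,
} from '@kbn/inference-endpoint-ui-common';

interface Props {
  // Available services, e.g. fetched by the host plugin from its own server route.
  providers: InferenceProvider[];
}

// Hypothetical consumer: render the shared fields inside a hook_form_lib Form,
// then read `config`/`secrets` from the form state on submit.
export const AddInferenceEndpointForm: React.FC<Props> = ({ providers }) => {
  const { form } = useForm();

  return (
    <Form form={form}>
      <InferenceServiceFormFields providers={providers} />
    </Form>
  );
};
```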
### Checklist
Check that the PR satisfies the following conditions.
Reviewers should verify that this PR satisfies this list as well.
- [X] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [X] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [X] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Elastic Machine
---
.github/CODEOWNERS | 1 +
package.json | 1 +
tsconfig.base.json | 2 +
x-pack/.i18nrc.json | 1 +
.../README.md | 7 +
.../kbn-inference-endpoint-ui-common/index.ts | 10 +
.../jest.config.js | 12 +
.../kibana.jsonc | 12 +
.../package.json | 6 +
.../setup_tests.ts | 9 +
.../components/additional_options_fields.tsx | 290 ++++++++
.../configuration/configuration_field.tsx | 228 ++++++
.../configuration_form_items.tsx | 89 +++
.../configuration/configuration_utils.test.ts | 62 ++
.../configuration/configuration_utils.ts | 49 ++
.../provider_config_hidden_field.tsx | 41 ++
.../provider_secret_hidden_field.tsx | 42 ++
.../inference_service_form_fields.test.tsx | 188 +++++
.../inference_service_form_fields.tsx | 375 ++++++++++
.../providers/assets/images/alibaba_cloud.svg | 3 +
.../assets/images/amazon_bedrock.svg | 11 +
.../providers/assets/images/anthropic.svg | 3 +
.../assets/images/azure_ai_studio.svg | 44 ++
.../providers/assets/images/azure_open_ai.svg | 9 +
.../providers/assets/images/cohere.svg | 9 +
.../providers/assets/images/elastic.svg | 16 +
.../assets/images/google_ai_studio.svg | 6 +
.../providers/assets/images/hugging_face.svg | 10 +
.../providers/assets/images/ibm_watsonx.svg | 3 +
.../providers/assets/images/mistral.svg | 34 +
.../providers/assets/images/open_ai.svg | 3 +
.../service_provider.test.tsx | 42 ++
.../service_provider.tsx | 125 ++++
.../components/providers/selectable.test.tsx | 75 ++
.../src/components/providers/selectable.tsx | 133 ++++
.../src/constants.ts | 24 +
.../src/translations.ts | 129 ++++
.../src/types/types.ts | 51 ++
.../src/utils/helpers.ts | 80 +++
.../tsconfig.json | 26 +
.../common/translations.ts | 7 +
.../common/types.ts | 12 +-
.../search_inference_endpoints/kibana.jsonc | 3 +-
.../add_inference_flyout_wrapper.test.tsx | 87 +++
.../add_inference_flyout_wrapper.tsx | 66 ++
.../inference_form.tsx | 69 ++
.../add_inference_endpoints/translations.ts | 36 +
.../public/components/inference_endpoints.tsx | 9 +-
.../components/inference_endpoints_header.tsx | 18 +-
.../public/hooks/translations.ts | 14 +
.../public/hooks/use_add_endpoint.test.tsx | 101 +++
.../public/hooks/use_add_endpoint.ts | 55 ++
.../public/hooks/use_providers.ts | 644 +++++++++++++++++
.../public/utils/test_utils/test_utils.ts | 652 ++++++++++++++++++
.../server/lib/add_inference_endpoint.test.ts | 55 ++
.../server/lib/add_inference_endpoint.ts | 47 ++
.../lib/fetch_inference_services.test.ts | 37 +
.../server/lib/fetch_inference_services.ts | 26 +
.../server/routes.ts | 42 +-
.../server/utils/unflatten_object.ts | 17 +
.../search_inference_endpoints/tsconfig.json | 7 +-
.../svl_search_inference_management_page.ts | 22 +
.../search/inference_management.ts | 6 +
yarn.lock | 4 +
64 files changed, 4288 insertions(+), 9 deletions(-)
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/README.md
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/jest.config.js
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/kibana.jsonc
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/package.json
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/setup_tests.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/additional_options_fields.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_field.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_form_items.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.test.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_config_hidden_field.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_secret_hidden_field.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/alibaba_cloud.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/amazon_bedrock.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/anthropic.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_ai_studio.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_open_ai.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/cohere.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/elastic.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/google_ai_studio.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/hugging_face.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/ibm_watsonx.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/mistral.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/open_ai.svg
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.test.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.test.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/constants.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/helpers.ts
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/tsconfig.json
create mode 100644 x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.test.tsx
create mode 100644 x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.tsx
create mode 100644 x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/inference_form.tsx
create mode 100644 x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/translations.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/public/hooks/use_add_endpoint.test.tsx
create mode 100644 x-pack/plugins/search_inference_endpoints/public/hooks/use_add_endpoint.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/public/hooks/use_providers.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/public/utils/test_utils/test_utils.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.test.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.test.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.ts
create mode 100644 x-pack/plugins/search_inference_endpoints/server/utils/unflatten_object.ts
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 89e94f4b35028..79a41f54e683c 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -823,6 +823,7 @@ x-pack/platform/packages/shared/kbn-data-forge @elastic/obs-ux-management-team
x-pack/platform/packages/shared/kbn-elastic-assistant @elastic/security-generative-ai
x-pack/platform/packages/shared/kbn-elastic-assistant-common @elastic/security-generative-ai
x-pack/platform/packages/shared/kbn-entities-schema @elastic/obs-entities
+x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common @elastic/response-ops @elastic/appex-ai-infra @elastic/obs-ai-assistant @elastic/security-generative-ai
x-pack/platform/packages/shared/kbn-langchain @elastic/security-generative-ai
x-pack/platform/packages/shared/kbn-slo-schema @elastic/obs-ux-management-team
x-pack/platform/packages/shared/ml/aiops_common @elastic/ml-ui
diff --git a/package.json b/package.json
index 9eec909bc4f4a..03ef2f17fe63e 100644
--- a/package.json
+++ b/package.json
@@ -577,6 +577,7 @@
"@kbn/index-management-shared-types": "link:x-pack/platform/packages/shared/index-management/index_management_shared_types",
"@kbn/index-patterns-test-plugin": "link:test/plugin_functional/plugins/index_patterns",
"@kbn/inference-common": "link:x-pack/platform/packages/shared/ai-infra/inference-common",
+ "@kbn/inference-endpoint-ui-common": "link:x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common",
"@kbn/inference-plugin": "link:x-pack/platform/plugins/shared/inference",
"@kbn/inference_integration_flyout": "link:x-pack/platform/packages/private/ml/inference_integration_flyout",
"@kbn/infra-forge": "link:x-pack/platform/packages/private/kbn-infra-forge",
diff --git a/tsconfig.base.json b/tsconfig.base.json
index 51938b7593019..f0968d3896a68 100644
--- a/tsconfig.base.json
+++ b/tsconfig.base.json
@@ -1064,6 +1064,8 @@
"@kbn/inference_integration_flyout/*": ["x-pack/platform/packages/private/ml/inference_integration_flyout/*"],
"@kbn/inference-common": ["x-pack/platform/packages/shared/ai-infra/inference-common"],
"@kbn/inference-common/*": ["x-pack/platform/packages/shared/ai-infra/inference-common/*"],
+ "@kbn/inference-endpoint-ui-common": ["x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common"],
+ "@kbn/inference-endpoint-ui-common/*": ["x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/*"],
"@kbn/inference-plugin": ["x-pack/platform/plugins/shared/inference"],
"@kbn/inference-plugin/*": ["x-pack/platform/plugins/shared/inference/*"],
"@kbn/infra-forge": ["x-pack/platform/packages/private/kbn-infra-forge"],
diff --git a/x-pack/.i18nrc.json b/x-pack/.i18nrc.json
index 0cac84a98693b..9e54ef70f1132 100644
--- a/x-pack/.i18nrc.json
+++ b/x-pack/.i18nrc.json
@@ -59,6 +59,7 @@
"xpack.idxMgmt": "platform/plugins/shared/index_management",
"xpack.idxMgmtPackage": "packages/index-management",
"xpack.indexLifecycleMgmt": "platform/plugins/private/index_lifecycle_management",
+ "xpack.inferenceEndpointUICommon": "platform/packages/shared/kbn-inference-endpoint-ui-common",
"xpack.infra": "solutions/observability/plugins/infra",
"xpack.logsDataAccess": "platform/plugins/shared/logs_data_access",
"xpack.logsExplorer": "solutions/observability/plugins/logs_explorer",
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/README.md b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/README.md
new file mode 100644
index 0000000000000..206267522f29d
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/README.md
@@ -0,0 +1,7 @@
+# @kbn/inference-endpoint-ui-common
+
+`Inference Endpoint UI common` is a shared UI library for creating AI connectors and/or inference endpoints.
+
+This package provides:
+
+- Components for rendering the GenAI services and their associated fields
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts
new file mode 100644
index 0000000000000..a2abc5514bd05
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts
@@ -0,0 +1,10 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+export { InferenceServiceFormFields } from './src/components/inference_service_form_fields';
+
+export * from './src/types/types';
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/jest.config.js b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/jest.config.js
new file mode 100644
index 0000000000000..6e305551c279a
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/jest.config.js
@@ -0,0 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+module.exports = {
+ preset: '@kbn/test',
+ rootDir: '../../../../..',
+ roots: ['<rootDir>/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common'],
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/kibana.jsonc b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/kibana.jsonc
new file mode 100644
index 0000000000000..e902264afb61a
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/kibana.jsonc
@@ -0,0 +1,12 @@
+{
+ "type": "shared-browser",
+ "id": "@kbn/inference-endpoint-ui-common",
+ "owner": [
+ "@elastic/response-ops",
+ "@elastic/appex-ai-infra",
+ "@elastic/obs-ai-assistant",
+ "@elastic/security-generative-ai"
+ ],
+ "group": "platform",
+ "visibility": "shared"
+}
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/package.json b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/package.json
new file mode 100644
index 0000000000000..c3ea31bb0a4f0
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/package.json
@@ -0,0 +1,6 @@
+{
+ "name": "@kbn/inference-endpoint-ui-common",
+ "private": true,
+ "version": "1.0.0",
+ "license": "Elastic License 2.0"
+}
\ No newline at end of file
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/setup_tests.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/setup_tests.ts
new file mode 100644
index 0000000000000..72e0edd0d07f7
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/setup_tests.ts
@@ -0,0 +1,9 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+// eslint-disable-next-line import/no-extraneous-dependencies
+import '@testing-library/jest-dom';
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/additional_options_fields.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/additional_options_fields.tsx
new file mode 100644
index 0000000000000..381b15b78020f
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/additional_options_fields.tsx
@@ -0,0 +1,290 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useMemo } from 'react';
+import { css } from '@emotion/react';
+
+import {
+ EuiFormRow,
+ EuiSpacer,
+ EuiTitle,
+ EuiAccordion,
+ EuiFieldText,
+ useEuiTheme,
+ EuiTextColor,
+ EuiButtonGroup,
+ EuiPanel,
+ EuiButtonEmpty,
+ EuiCopy,
+ EuiButton,
+ useEuiFontSize,
+ EuiText,
+} from '@elastic/eui';
+import {
+ getFieldValidityAndErrorMessage,
+ UseField,
+ useFormContext,
+} from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import { fieldValidators } from '@kbn/es-ui-shared-plugin/static/forms/helpers';
+import { ConfigurationFormItems } from './configuration/configuration_form_items';
+import * as LABELS from '../translations';
+import { DEFAULT_TASK_TYPE } from '../constants';
+import { Config, ConfigEntryView } from '../types/types';
+import { TaskTypeOption } from '../utils/helpers';
+
+// Custom trigger button CSS
+const buttonCss = css`
+ &:hover {
+ text-decoration: none;
+ }
+`;
+
+interface AdditionalOptionsFieldsProps {
+ config: Config;
+ optionalProviderFormFields: ConfigEntryView[];
+ onSetProviderConfigEntry: (key: string, value: unknown) => Promise<void>;
+ onTaskTypeOptionsSelect: (taskType: string, provider?: string) => void;
+ selectedTaskType?: string;
+ taskTypeOptions: TaskTypeOption[];
+}
+
+export const AdditionalOptionsFields: React.FC<AdditionalOptionsFieldsProps> = ({
+ config,
+ taskTypeOptions,
+ optionalProviderFormFields,
+ selectedTaskType,
+ onSetProviderConfigEntry,
+ onTaskTypeOptionsSelect,
+}) => {
+ const xsFontSize = useEuiFontSize('xs').fontSize;
+ const { euiTheme } = useEuiTheme();
+ const { setFieldValue } = useFormContext();
+
+ const taskTypeSettings = useMemo(
+ () =>
+ selectedTaskType || config.taskType?.length ? (
+ <>
+
+
+
+
+
+
+
+
+
+
+ {(field) => {
+ const { isInvalid, errorMessage } = getFieldValidityAndErrorMessage(field);
+
+ return (
+
+ {taskTypeOptions.length === 1 ? (
+ onTaskTypeOptionsSelect(config.taskType)}
+ >
+ {config.taskType}
+
+ ) : (
+ onTaskTypeOptionsSelect(id)}
+ options={taskTypeOptions}
+ color="text"
+ type="single"
+ />
+ )}
+
+ );
+ }}
+
+ >
+ ) : null,
+ [
+ selectedTaskType,
+ config.taskType,
+ xsFontSize,
+ euiTheme.colors,
+ taskTypeOptions,
+ onTaskTypeOptionsSelect,
+ ]
+ );
+
+ const inferenceUri = useMemo(() => `_inference/${selectedTaskType}/`, [selectedTaskType]);
+
+ return (
+
+
+
+ }
+ initialIsOpen={true}
+ >
+
+
+ {optionalProviderFormFields.length > 0 ? (
+ <>
+
+
+
+
+
+
+
+
+
+
+
+ >
+ ) : null}
+
+ {taskTypeSettings}
+
+
+
+
+
+
+
+
+
+
+
+
+ {(field) => {
+ const { isInvalid, errorMessage } = getFieldValidityAndErrorMessage(field);
+
+ return (
+
+ }
+ >
+ {
+ setFieldValue('config.inferenceId', e.target.value);
+ }}
+ prepend={inferenceUri}
+ append={
+
+ {(copy) => (
+
+
+
+ )}
+
+ }
+ />
+
+ );
+ }}
+
+
+
+ );
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_field.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_field.tsx
new file mode 100644
index 0000000000000..6772a6aaaf391
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_field.tsx
@@ -0,0 +1,228 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useEffect, useState } from 'react';
+
+import {
+ EuiAccordion,
+ EuiFieldText,
+ EuiFieldPassword,
+ EuiSwitch,
+ EuiTextArea,
+ EuiFieldNumber,
+} from '@elastic/eui';
+
+import { isEmpty } from 'lodash/fp';
+import { ConfigEntryView, FieldType } from '../../types/types';
+import { ensureBooleanType, ensureCorrectTyping, ensureStringType } from './configuration_utils';
+
+interface ConfigurationFieldProps {
+ configEntry: ConfigEntryView;
+ isLoading: boolean;
+ setConfigValue: (value: number | string | boolean | null) => void;
+}
+
+interface ConfigInputFieldProps {
+ configEntry: ConfigEntryView;
+ isLoading: boolean;
+ validateAndSetConfigValue: (value: string | boolean) => void;
+}
+export const ConfigInputField: React.FC<ConfigInputFieldProps> = ({
+ configEntry,
+ isLoading,
+ validateAndSetConfigValue,
+}) => {
+ const { isValid, value, default_value: defaultValue, key } = configEntry;
+ const [innerValue, setInnerValue] = useState(
+ !value || value.toString().length === 0 ? defaultValue : value
+ );
+
+ useEffect(() => {
+ setInnerValue(!value || value.toString().length === 0 ? defaultValue : value);
+ }, [defaultValue, value]);
+ return (
+ {
+ setInnerValue(event.target.value);
+ validateAndSetConfigValue(event.target.value);
+ }}
+ />
+ );
+};
+
+export const ConfigSwitchField: React.FC<ConfigInputFieldProps> = ({
+ configEntry,
+ isLoading,
+ validateAndSetConfigValue,
+}) => {
+ const { label, value, default_value: defaultValue, key } = configEntry;
+ const [innerValue, setInnerValue] = useState(value ?? defaultValue);
+ useEffect(() => {
+ setInnerValue(value ?? defaultValue);
+ }, [defaultValue, value]);
+ return (
+ {label}
}
+ onChange={(event) => {
+ setInnerValue(event.target.checked);
+ validateAndSetConfigValue(event.target.checked);
+ }}
+ />
+ );
+};
+
+export const ConfigInputTextArea: React.FC<ConfigInputFieldProps> = ({
+ isLoading,
+ configEntry,
+ validateAndSetConfigValue,
+}) => {
+ const { isValid, value, default_value: defaultValue, key } = configEntry;
+ const [innerValue, setInnerValue] = useState(value ?? defaultValue);
+ useEffect(() => {
+ setInnerValue(value ?? '');
+ }, [defaultValue, value]);
+ return (
+ {
+ setInnerValue(event.target.value);
+ validateAndSetConfigValue(event.target.value);
+ }}
+ />
+ );
+};
+
+export const ConfigNumberField: React.FC<ConfigInputFieldProps> = ({
+ configEntry,
+ isLoading,
+ validateAndSetConfigValue,
+}) => {
+ const { isValid, value, default_value: defaultValue, key } = configEntry;
+ const [innerValue, setInnerValue] = useState(value ?? defaultValue);
+ useEffect(() => {
+ setInnerValue(!value || value.toString().length === 0 ? defaultValue : value);
+ }, [defaultValue, value]);
+ return (
+ {
+ const newValue = isEmpty(event.target.value) ? '0' : event.target.value;
+ setInnerValue(newValue);
+ validateAndSetConfigValue(newValue);
+ }}
+ />
+ );
+};
+
+export const ConfigSensitiveTextArea: React.FC<ConfigInputFieldProps> = ({
+ isLoading,
+ configEntry,
+ validateAndSetConfigValue,
+}) => {
+ const { key, label } = configEntry;
+ return (
+ {label}}>
+
+
+ );
+};
+
+export const ConfigInputPassword: React.FC<ConfigInputFieldProps> = ({
+ isLoading,
+ configEntry,
+ validateAndSetConfigValue,
+}) => {
+ const { value, key } = configEntry;
+ const [innerValue, setInnerValue] = useState(value ?? null);
+ useEffect(() => {
+ setInnerValue(value ?? null);
+ }, [value]);
+ return (
+ <>
+ {
+ setInnerValue(event.target.value);
+ validateAndSetConfigValue(event.target.value);
+ }}
+ />
+ >
+ );
+};
+
+export const ConfigurationField: React.FC<ConfigurationFieldProps> = ({
+ configEntry,
+ isLoading,
+ setConfigValue,
+}) => {
+ const validateAndSetConfigValue = (value: number | string | boolean) => {
+ setConfigValue(ensureCorrectTyping(configEntry.type, value));
+ };
+
+ const { key, type, sensitive } = configEntry;
+
+ switch (type) {
+ case FieldType.INTEGER:
+ return (
+
+ );
+
+ case FieldType.BOOLEAN:
+ return (
+
+ );
+
+ default:
+ return sensitive ? (
+
+ ) : (
+
+ );
+ }
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_form_items.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_form_items.tsx
new file mode 100644
index 0000000000000..aa382f054f6be
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_form_items.tsx
@@ -0,0 +1,89 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import {
+ EuiCallOut,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiFormRow,
+ EuiSpacer,
+ EuiText,
+} from '@elastic/eui';
+
+import { ConfigEntryView } from '../../types/types';
+import { ConfigurationField } from './configuration_field';
+import * as LABELS from '../../translations';
+
+interface ConfigurationFormItemsProps {
+ isLoading: boolean;
+ items: ConfigEntryView[];
+ setConfigEntry: (key: string, value: string | number | boolean | null) => void;
+ direction?: 'column' | 'row' | 'rowReverse' | 'columnReverse' | undefined;
+}
+
+export const ConfigurationFormItems: React.FC<ConfigurationFormItemsProps> = ({
+ isLoading,
+ items,
+ setConfigEntry,
+ direction,
+}) => {
+ return (
+
+ {items.map((configEntry) => {
+ const { key, isValid, label, sensitive, description, validationErrors, required } =
+ configEntry;
+
+ // toggle and sensitive textarea labels go next to the element, not in the row
+ const rowLabel = description ? (
+
+
+ {label}
+
+
+ ) : (
+ {label}
+ );
+
+ const optionalLabel = !required ? (
+
+ {LABELS.OPTIONALTEXT}
+
+ ) : undefined;
+
+ return (
+
+
+ {
+ setConfigEntry(key, value);
+ }}
+ />
+
+ {sensitive ? (
+ <>
+
+
+ >
+ ) : null}
+
+ );
+ })}
+
+ );
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.test.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.test.ts
new file mode 100644
index 0000000000000..9345dcd002c32
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.test.ts
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { FieldType } from '@kbn/search-connectors/types';
+import { ensureBooleanType, ensureCorrectTyping, ensureStringType } from './configuration_utils';
+
+describe('configuration utils', () => {
+ describe('ensureBooleanType', () => {
+ it('converts truthy values to true', () => {
+ expect(ensureBooleanType('true')).toBe(true);
+ expect(ensureBooleanType(1)).toBe(true);
+ expect(ensureBooleanType(true)).toBe(true);
+ expect(ensureBooleanType('any string')).toBe(true);
+ });
+
+ it('converts falsy values to false', () => {
+ expect(ensureBooleanType('')).toBe(false);
+ expect(ensureBooleanType(0)).toBe(false);
+ expect(ensureBooleanType(false)).toBe(false);
+ expect(ensureBooleanType(null)).toBe(false);
+ });
+ });
+
+ describe('ensureStringType', () => {
+ it('converts values to string', () => {
+ expect(ensureStringType('test')).toBe('test');
+ expect(ensureStringType(123)).toBe('123');
+ expect(ensureStringType(true)).toBe('true');
+ expect(ensureStringType(false)).toBe('false');
+ });
+
+ it('converts null to empty string', () => {
+ expect(ensureStringType(null)).toBe('');
+ });
+ });
+
+ describe('ensureCorrectTyping', () => {
+ it('handles integer type', () => {
+ expect(ensureCorrectTyping(FieldType.INTEGER, '123')).toBe(123);
+ expect(ensureCorrectTyping(FieldType.INTEGER, 456)).toBe(456);
+ expect(ensureCorrectTyping(FieldType.INTEGER, 'invalid')).toBe('invalid');
+ expect(ensureCorrectTyping(FieldType.INTEGER, null)).toBe(null);
+ });
+
+ it('handles boolean type', () => {
+ expect(ensureCorrectTyping(FieldType.BOOLEAN, true)).toBe(true);
+ expect(ensureCorrectTyping(FieldType.BOOLEAN, 1)).toBe(true);
+ expect(ensureCorrectTyping(FieldType.BOOLEAN, false)).toBe(false);
+ expect(ensureCorrectTyping(FieldType.BOOLEAN, null)).toBe(false);
+ });
+
+ it('handles string type', () => {
+ expect(ensureCorrectTyping(FieldType.STRING, 'test')).toBe('test');
+ expect(ensureCorrectTyping(FieldType.STRING, 123)).toBe('123');
+ expect(ensureCorrectTyping(FieldType.STRING, null)).toBe('');
+ });
+ });
+});
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.ts
new file mode 100644
index 0000000000000..45e886b368443
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/configuration/configuration_utils.ts
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { FieldType } from '../../types/types';
+
+export const validIntInput = (value: string | number | boolean | null): boolean => {
+ // reject non integers (including x.0 floats), but don't validate if empty
+ return (value !== null || value !== '') &&
+ (isNaN(Number(value)) ||
+ !Number.isSafeInteger(Number(value)) ||
+ ensureStringType(value).indexOf('.') >= 0)
+ ? false
+ : true;
+};
+
+export const ensureCorrectTyping = (
+ type: FieldType,
+ value: string | number | boolean | null
+): string | number | boolean | null => {
+ switch (type) {
+ case FieldType.INTEGER:
+ return validIntInput(value) ? ensureIntType(value) : value;
+ case FieldType.BOOLEAN:
+ return ensureBooleanType(value);
+ default:
+ return ensureStringType(value);
+ }
+};
+
+export const ensureStringType = (value: string | number | boolean | null): string => {
+ return value !== null ? String(value) : '';
+};
+
+export const ensureIntType = (value: string | number | boolean | null): number | null => {
+ // int is null-safe to prevent empty values from becoming zeroes
+ if (value === null || value === '') {
+ return null;
+ }
+
+ return parseInt(String(value), 10);
+};
+
+export const ensureBooleanType = (value: string | number | boolean | null): boolean => {
+ return Boolean(value);
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_config_hidden_field.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_config_hidden_field.tsx
new file mode 100644
index 0000000000000..4196bd0a2b709
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_config_hidden_field.tsx
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { UseField } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import { HiddenField } from '@kbn/es-ui-shared-plugin/static/forms/components';
+import React from 'react';
+import { ConfigEntryView } from '../../types/types';
+import { getNonEmptyValidator } from '../../utils/helpers';
+
+interface ProviderConfigHiddenFieldProps {
+ providerSchema: ConfigEntryView[];
+ setRequiredProviderFormFields: React.Dispatch<React.SetStateAction<ConfigEntryView[]>>;
+ isSubmitting: boolean;
+}
+
+export const ProviderConfigHiddenField: React.FC<ProviderConfigHiddenFieldProps> = ({
+ providerSchema,
+ setRequiredProviderFormFields,
+ isSubmitting,
+}) => (
+
+);
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_secret_hidden_field.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_secret_hidden_field.tsx
new file mode 100644
index 0000000000000..8060fe1cce13d
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/hidden_fields/provider_secret_hidden_field.tsx
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { UseField } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import React from 'react';
+import { HiddenField } from '@kbn/es-ui-shared-plugin/static/forms/components';
+import { ConfigEntryView } from '../../types/types';
+import { getNonEmptyValidator } from '../../utils/helpers';
+
+interface ProviderSecretHiddenFieldProps {
+ providerSchema: ConfigEntryView[];
+ setRequiredProviderFormFields: React.Dispatch<React.SetStateAction<ConfigEntryView[]>>;
+ isSubmitting: boolean;
+}
+
+export const ProviderSecretHiddenField: React.FC<ProviderSecretHiddenFieldProps> = ({
+ providerSchema,
+ setRequiredProviderFormFields,
+ isSubmitting,
+}) => (
+
+);
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx
new file mode 100644
index 0000000000000..c5d19aa26919e
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx
@@ -0,0 +1,188 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { InferenceServiceFormFields } from './inference_service_form_fields';
+import { FieldType, InferenceProvider } from '../types/types';
+import React from 'react';
+import { render, screen } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import { Form, useForm } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import { I18nProvider } from '@kbn/i18n-react';
+
+const providers = [
+ {
+ service: 'hugging_face',
+ name: 'Hugging Face',
+ task_types: ['text_embedding', 'sparse_embedding'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ url: {
+ default_value: 'https://api.openai.com/v1/embeddings',
+ description: 'The URL endpoint to use for the requests.',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'cohere',
+ name: 'Cohere',
+ task_types: ['text_embedding', 'rerank', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ },
+ },
+ {
+ service: 'anthropic',
+ name: 'Anthropic',
+ task_types: ['completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'By default, the anthropic service sets the number of requests allowed per minute to 50.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+] as InferenceProvider[];
+
+const MockFormProvider = ({ children }: { children: React.ReactElement }) => {
+ const { form } = useForm();
+
+ return (
+ <I18nProvider>
+ <Form form={form}>{children}</Form>
+ </I18nProvider>
+ );
+};
+
+describe('Inference Services', () => {
+ it('renders', () => {
+ render(
+ <MockFormProvider>
+ <InferenceServiceFormFields providers={providers} />
+ </MockFormProvider>
+ );
+
+ expect(screen.getByTestId('provider-select')).toBeInTheDocument();
+ });
+
+ it('renders Selectable', async () => {
+ render(
+ <MockFormProvider>
+ <InferenceServiceFormFields providers={providers} />
+ </MockFormProvider>
+ );
+
+ await userEvent.click(screen.getByTestId('provider-select'));
+ expect(screen.getByTestId('euiSelectableList')).toBeInTheDocument();
+ });
+
+ it('renders selected provider fields - hugging_face', async () => {
+ render(
+ <MockFormProvider>
+ <InferenceServiceFormFields providers={providers} />
+ </MockFormProvider>
+ );
+
+ await userEvent.click(screen.getByTestId('provider-select'));
+ await userEvent.click(screen.getByText('Hugging Face'));
+
+ expect(screen.getByTestId('provider-select')).toHaveValue('Hugging Face');
+ expect(screen.getByTestId('api_key-password')).toBeInTheDocument();
+ expect(screen.getByTestId('url-input')).toBeInTheDocument();
+ expect(screen.getByTestId('taskTypeSelect')).toBeInTheDocument();
+ expect(screen.getByTestId('inference-endpoint-input-field')).toBeInTheDocument();
+ expect(screen.queryByTestId('inference-endpoint-input-field')).toHaveDisplayValue(
+ /hugging_face-text_embedding/
+ );
+ });
+
+ it('re-renders fields when selected to anthropic from hugging_face', async () => {
+ render(
+ <MockFormProvider>
+ <InferenceServiceFormFields providers={providers} />
+ </MockFormProvider>
+ );
+
+ await userEvent.click(screen.getByTestId('provider-select'));
+ await userEvent.click(screen.getByText('Hugging Face'));
+ expect(screen.getByTestId('provider-select')).toHaveValue('Hugging Face');
+
+ await userEvent.click(screen.getByTestId('provider-select'));
+ await userEvent.click(screen.getByText('Anthropic'));
+
+ expect(screen.getByTestId('provider-select')).toHaveValue('Anthropic');
+ expect(screen.getByTestId('api_key-password')).toBeInTheDocument();
+ expect(screen.getByTestId('model_id-input')).toBeInTheDocument();
+ expect(screen.getByTestId('taskTypeSelectSingle')).toBeInTheDocument();
+ expect(screen.getByTestId('inference-endpoint-input-field')).toBeInTheDocument();
+ expect(screen.queryByTestId('inference-endpoint-input-field')).toHaveDisplayValue(
+ /anthropic-completion/
+ );
+ });
+});
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.tsx
new file mode 100644
index 0000000000000..98e4dfdd6afbc
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.tsx
@@ -0,0 +1,375 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useCallback, useEffect, useMemo, useState } from 'react';
+import {
+ getFieldValidityAndErrorMessage,
+ UseField,
+ useFormContext,
+ useFormData,
+} from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import { fieldValidators } from '@kbn/es-ui-shared-plugin/static/forms/helpers';
+import {
+ EuiFieldText,
+ EuiFieldTextProps,
+ EuiFormControlLayout,
+ EuiFormRow,
+ EuiHorizontalRule,
+ EuiInputPopover,
+ EuiSpacer,
+ keys,
+} from '@elastic/eui';
+import { FormattedMessage } from '@kbn/i18n-react';
+import { ConnectorFormSchema } from '@kbn/triggers-actions-ui-plugin/public';
+
+import * as LABELS from '../translations';
+import { Config, ConfigEntryView, FieldType, InferenceProvider, Secrets } from '../types/types';
+import { SERVICE_PROVIDERS } from './providers/render_service_provider/service_provider';
+import { DEFAULT_TASK_TYPE, ServiceProviderKeys } from '../constants';
+import { SelectableProvider } from './providers/selectable';
+import { TaskTypeOption, generateInferenceEndpointId, getTaskTypeOptions } from '../utils/helpers';
+import { ConfigurationFormItems } from './configuration/configuration_form_items';
+import { AdditionalOptionsFields } from './additional_options_fields';
+import { ProviderSecretHiddenField } from './hidden_fields/provider_secret_hidden_field';
+import { ProviderConfigHiddenField } from './hidden_fields/provider_config_hidden_field';
+
+interface InferenceServicesProps {
+ providers: InferenceProvider[];
+}
+
+export const InferenceServiceFormFields: React.FC<InferenceServicesProps> = ({ providers }) => {
+ const [isProviderPopoverOpen, setProviderPopoverOpen] = useState(false);
+ const [providerSchema, setProviderSchema] = useState<ConfigEntryView[]>([]);
+ const [taskTypeOptions, setTaskTypeOptions] = useState<TaskTypeOption[]>([]);
+ const [selectedTaskType, setSelectedTaskType] = useState(DEFAULT_TASK_TYPE);
+
+ const { updateFieldValues, setFieldValue, validateFields, isSubmitting } = useFormContext();
+ const [requiredProviderFormFields, setRequiredProviderFormFields] = useState<ConfigEntryView[]>(
+ []
+ );
+ const [optionalProviderFormFields, setOptionalProviderFormFields] = useState<ConfigEntryView[]>(
+ []
+ );
+ const [{ config, secrets }] = useFormData<ConnectorFormSchema<Config, Secrets>>({
+ watch: [
+ 'secrets.providerSecrets',
+ 'config.taskType',
+ 'config.inferenceId',
+ 'config.provider',
+ 'config.providerConfig',
+ ],
+ });
+
+ const toggleProviderPopover = useCallback(() => {
+ setProviderPopoverOpen((isOpen) => !isOpen);
+ }, []);
+
+ const closeProviderPopover = useCallback(() => {
+ setProviderPopoverOpen(false);
+ }, []);
+
+ const handleProviderKeyboardOpen: EuiFieldTextProps['onKeyDown'] = useCallback((event: any) => {
+ if (event.key === keys.ENTER) {
+ setProviderPopoverOpen(true);
+ }
+ }, []);
+
+ const providerIcon = useMemo(
+ () =>
+ Object.keys(SERVICE_PROVIDERS).includes(config?.provider)
+ ? SERVICE_PROVIDERS[config?.provider as ServiceProviderKeys].icon
+ : undefined,
+ [config?.provider]
+ );
+
+ const providerName = useMemo(
+ () =>
+ Object.keys(SERVICE_PROVIDERS).includes(config?.provider)
+ ? SERVICE_PROVIDERS[config?.provider as ServiceProviderKeys].name
+ : config?.provider,
+ [config?.provider]
+ );
+
+ const onTaskTypeOptionsSelect = useCallback(
+ (taskType: string) => {
+ setSelectedTaskType(taskType);
+
+ const inferenceId = generateInferenceEndpointId({
+ ...config,
+ taskType,
+ });
+
+ updateFieldValues({
+ config: {
+ taskType,
+ inferenceId,
+ },
+ });
+ },
+ [config, updateFieldValues]
+ );
+
+ const onProviderChange = useCallback(
+ (provider?: string) => {
+ const newProvider = providers?.find((p) => p.service === provider);
+
+ setTaskTypeOptions(getTaskTypeOptions(newProvider?.task_types ?? []));
+ if (newProvider?.task_types && newProvider?.task_types.length > 0) {
+ onTaskTypeOptionsSelect(newProvider?.task_types[0]);
+ }
+
+ const newProviderSchema: ConfigEntryView[] = Object.keys(
+ newProvider?.configurations ?? {}
+ ).map(
+ (k): ConfigEntryView => ({
+ key: k,
+ isValid: true,
+ validationErrors: [],
+ value: newProvider?.configurations[k].default_value ?? null,
+ default_value: newProvider?.configurations[k].default_value ?? null,
+ description: newProvider?.configurations[k].description ?? null,
+ label: newProvider?.configurations[k].label ?? '',
+ required: newProvider?.configurations[k].required ?? false,
+ sensitive: newProvider?.configurations[k].sensitive ?? false,
+ updatable: newProvider?.configurations[k].updatable ?? false,
+ type: newProvider?.configurations[k].type ?? FieldType.STRING,
+ })
+ );
+
+ setProviderSchema(newProviderSchema);
+
+ const defaultProviderConfig: Record<string, unknown> = {};
+ const defaultProviderSecrets: Record<string, unknown> = {};
+
+ Object.keys(newProvider?.configurations ?? {}).forEach((k) => {
+ if (!newProvider?.configurations[k].sensitive) {
+ if (newProvider?.configurations[k] && !!newProvider?.configurations[k].default_value) {
+ defaultProviderConfig[k] = newProvider.configurations[k].default_value;
+ } else {
+ defaultProviderConfig[k] = null;
+ }
+ } else {
+ defaultProviderSecrets[k] = null;
+ }
+ });
+ const inferenceId = generateInferenceEndpointId({
+ ...config,
+ provider: newProvider?.service ?? '',
+ taskType: newProvider?.task_types[0] ?? DEFAULT_TASK_TYPE,
+ });
+
+ updateFieldValues({
+ config: {
+ provider: newProvider?.service,
+ providerConfig: defaultProviderConfig,
+ inferenceId,
+ },
+ secrets: {
+ providerSecrets: defaultProviderSecrets,
+ },
+ });
+ },
+ [config, onTaskTypeOptionsSelect, providers, updateFieldValues]
+ );
+
+ const onSetProviderConfigEntry = useCallback(
+ async (key: string, value: unknown) => {
+ const entry: ConfigEntryView | undefined = providerSchema.find(
+ (p: ConfigEntryView) => p.key === key
+ );
+ if (entry) {
+ if (entry.sensitive) {
+ if (!secrets.providerSecrets) {
+ secrets.providerSecrets = {};
+ }
+ const newSecrets = { ...secrets.providerSecrets };
+ newSecrets[key] = value;
+ setFieldValue('secrets.providerSecrets', newSecrets);
+ await validateFields(['secrets.providerSecrets']);
+ } else {
+ if (!config.providerConfig) {
+ config.providerConfig = {};
+ }
+ const newConfig = { ...config.providerConfig };
+ newConfig[key] = value;
+ setFieldValue('config.providerConfig', newConfig);
+ await validateFields(['config.providerConfig']);
+ }
+ }
+ },
+ [config, providerSchema, secrets, setFieldValue, validateFields]
+ );
+
+ const onClearProvider = useCallback(() => {
+ onProviderChange();
+ setFieldValue('config.taskType', '');
+ setFieldValue('config.provider', '');
+ }, [onProviderChange, setFieldValue]);
+
+ const providerSuperSelect = useCallback(
+ (isInvalid: boolean) => (
+
+ {
+ /* Intentionally left blank as onChange is required to avoid console error
+ but not used in this context
+ */
+ }}
+ />
+
+ ),
+ [
+ config?.provider,
+ handleProviderKeyboardOpen,
+ toggleProviderPopover,
+ isProviderPopoverOpen,
+ onClearProvider,
+ providerIcon,
+ providerName,
+ ]
+ );
+
+ useEffect(() => {
+ if (isSubmitting) {
+ validateFields(['config.providerConfig']);
+ validateFields(['secrets.providerSecrets']);
+ }
+ }, [isSubmitting, config, validateFields]);
+
+ useEffect(() => {
+ // Set values from the provider secrets and config to the schema
+ const existingConfiguration = providerSchema
+ ? providerSchema.map((item: ConfigEntryView) => {
+ const itemValue: ConfigEntryView = item;
+ itemValue.isValid = true;
+ if (item.sensitive && secrets?.providerSecrets) {
+ const secretValue = secrets.providerSecrets[item.key];
+ if (
+ typeof secretValue === 'string' ||
+ typeof secretValue === 'number' ||
+ typeof secretValue === 'boolean' ||
+ secretValue === null
+ ) {
+ itemValue.value = secretValue;
+ }
+ } else if (config?.providerConfig) {
+ const configValue = config.providerConfig[item.key];
+ if (
+ typeof configValue === 'string' ||
+ typeof configValue === 'number' ||
+ typeof configValue === 'boolean' ||
+ configValue === null
+ ) {
+ itemValue.value = configValue;
+ }
+ }
+ return itemValue;
+ })
+ : [];
+
+ setOptionalProviderFormFields(existingConfiguration.filter((p) => !p.required && !p.sensitive));
+ setRequiredProviderFormFields(existingConfiguration.filter((p) => p.required || p.sensitive));
+ }, [config?.providerConfig, providerSchema, secrets]);
+
+ return (
+ <>
+
+ {(field) => {
+ const { isInvalid, errorMessage } = getFieldValidityAndErrorMessage(field);
+ const selectInput = providerSuperSelect(isInvalid);
+ return (
+
+ }
+ isInvalid={isInvalid}
+ error={errorMessage}
+ >
+
+
+
+
+ );
+ }}
+
+ {config?.provider ? (
+ <>
+
+
+
+
+
+
+
+
+ >
+ ) : null}
+ >
+ );
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/alibaba_cloud.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/alibaba_cloud.svg
new file mode 100644
index 0000000000000..1ae552d509c3a
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/alibaba_cloud.svg
@@ -0,0 +1,3 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/amazon_bedrock.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/amazon_bedrock.svg
new file mode 100644
index 0000000000000..f8815d4f75ec5
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/amazon_bedrock.svg
@@ -0,0 +1,11 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/anthropic.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/anthropic.svg
new file mode 100644
index 0000000000000..c361cda86a7df
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/anthropic.svg
@@ -0,0 +1,3 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_ai_studio.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_ai_studio.svg
new file mode 100644
index 0000000000000..405e182a10394
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_ai_studio.svg
@@ -0,0 +1,44 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_open_ai.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_open_ai.svg
new file mode 100644
index 0000000000000..122c0c65af13c
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/azure_open_ai.svg
@@ -0,0 +1,9 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/cohere.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/cohere.svg
new file mode 100644
index 0000000000000..69953809fec35
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/cohere.svg
@@ -0,0 +1,9 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/elastic.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/elastic.svg
new file mode 100644
index 0000000000000..e763c2e2f2ab6
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/elastic.svg
@@ -0,0 +1,16 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/google_ai_studio.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/google_ai_studio.svg
new file mode 100644
index 0000000000000..b6e34ae15c9e4
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/google_ai_studio.svg
@@ -0,0 +1,6 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/hugging_face.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/hugging_face.svg
new file mode 100644
index 0000000000000..87ac70c5a18f4
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/hugging_face.svg
@@ -0,0 +1,10 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/ibm_watsonx.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/ibm_watsonx.svg
new file mode 100644
index 0000000000000..5883eff3884d6
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/ibm_watsonx.svg
@@ -0,0 +1,3 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/mistral.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/mistral.svg
new file mode 100644
index 0000000000000..f62258a327594
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/mistral.svg
@@ -0,0 +1,34 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/open_ai.svg b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/open_ai.svg
new file mode 100644
index 0000000000000..9ddc8f8fd63b8
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/assets/images/open_ai.svg
@@ -0,0 +1,3 @@
+
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.test.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.test.tsx
new file mode 100644
index 0000000000000..bd4591c31a73a
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.test.tsx
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { render, screen } from '@testing-library/react';
+import React from 'react';
+import { ServiceProviderIcon, ServiceProviderName } from './service_provider';
+import { ServiceProviderKeys } from '../../../constants';
+
+jest.mock('../assets/images/elastic.svg', () => 'elasticIcon.svg');
+jest.mock('../assets/images/hugging_face.svg', () => 'huggingFaceIcon.svg');
+jest.mock('../assets/images/cohere.svg', () => 'cohereIcon.svg');
+jest.mock('../assets/images/open_ai.svg', () => 'openAIIcon.svg');
+
+describe('ServiceProviderIcon component', () => {
+ it('renders Hugging Face icon and name when providerKey is hugging_face', () => {
+ render(<ServiceProviderIcon providerKey={ServiceProviderKeys.hugging_face} />);
+ const icon = screen.getByTestId('icon-service-provider-hugging_face');
+ expect(icon).toBeInTheDocument();
+ });
+
+ it('renders Open AI icon and name when providerKey is openai', () => {
+ render(<ServiceProviderIcon providerKey={ServiceProviderKeys.openai} />);
+ const icon = screen.getByTestId('icon-service-provider-openai');
+ expect(icon).toBeInTheDocument();
+ });
+});
+
+describe('ServiceProviderName component', () => {
+ it('renders Hugging Face icon and name when providerKey is hugging_face', () => {
+ render(<ServiceProviderName providerKey={ServiceProviderKeys.hugging_face} />);
+ expect(screen.getByText('Hugging Face')).toBeInTheDocument();
+ });
+
+ it('renders Open AI icon and name when providerKey is openai', () => {
+ render(<ServiceProviderName providerKey={ServiceProviderKeys.openai} />);
+ expect(screen.getByText('OpenAI')).toBeInTheDocument();
+ });
+});
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.tsx
new file mode 100644
index 0000000000000..e50cfae1d30bc
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/render_service_provider/service_provider.tsx
@@ -0,0 +1,125 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { EuiHighlight, EuiIcon } from '@elastic/eui';
+import React from 'react';
+
+import { ServiceProviderKeys } from '../../../constants';
+import elasticIcon from '../assets/images/elastic.svg';
+import huggingFaceIcon from '../assets/images/hugging_face.svg';
+import cohereIcon from '../assets/images/cohere.svg';
+import openAIIcon from '../assets/images/open_ai.svg';
+import azureAIStudioIcon from '../assets/images/azure_ai_studio.svg';
+import azureOpenAIIcon from '../assets/images/azure_open_ai.svg';
+import googleAIStudioIcon from '../assets/images/google_ai_studio.svg';
+import mistralIcon from '../assets/images/mistral.svg';
+import amazonBedrockIcon from '../assets/images/amazon_bedrock.svg';
+import anthropicIcon from '../assets/images/anthropic.svg';
+import alibabaCloudIcon from '../assets/images/alibaba_cloud.svg';
+import ibmWatsonxIcon from '../assets/images/ibm_watsonx.svg';
+
+interface ServiceProviderProps {
+ providerKey: ServiceProviderKeys;
+ searchValue?: string;
+}
+
+export type ProviderSolution = 'Observability' | 'Security' | 'Search';
+
+interface ServiceProviderRecord {
+ icon: string;
+ name: string;
+ solutions: ProviderSolution[];
+}
+
+export const SERVICE_PROVIDERS: Record<ServiceProviderKeys, ServiceProviderRecord> = {
+ [ServiceProviderKeys.amazonbedrock]: {
+ icon: amazonBedrockIcon,
+ name: 'Amazon Bedrock',
+ solutions: ['Observability', 'Security', 'Search'],
+ },
+ [ServiceProviderKeys.azureaistudio]: {
+ icon: azureAIStudioIcon,
+ name: 'Azure AI Studio',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.azureopenai]: {
+ icon: azureOpenAIIcon,
+ name: 'Azure OpenAI',
+ solutions: ['Observability', 'Security', 'Search'],
+ },
+ [ServiceProviderKeys.anthropic]: {
+ icon: anthropicIcon,
+ name: 'Anthropic',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.cohere]: {
+ icon: cohereIcon,
+ name: 'Cohere',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.elasticsearch]: {
+ icon: elasticIcon,
+ name: 'Elasticsearch',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.googleaistudio]: {
+ icon: googleAIStudioIcon,
+ name: 'Google AI Studio',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.googlevertexai]: {
+ icon: googleAIStudioIcon,
+ name: 'Google Vertex AI',
+ solutions: ['Observability', 'Security', 'Search'],
+ },
+ [ServiceProviderKeys.hugging_face]: {
+ icon: huggingFaceIcon,
+ name: 'Hugging Face',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.mistral]: {
+ icon: mistralIcon,
+ name: 'Mistral',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.openai]: {
+ icon: openAIIcon,
+ name: 'OpenAI',
+ solutions: ['Observability', 'Security', 'Search'],
+ },
+ [ServiceProviderKeys['alibabacloud-ai-search']]: {
+ icon: alibabaCloudIcon,
+ name: 'AlibabaCloud AI Search',
+ solutions: ['Search'],
+ },
+ [ServiceProviderKeys.watsonxai]: {
+ icon: ibmWatsonxIcon,
+ name: 'IBM Watsonx',
+ solutions: ['Search'],
+ },
+};
+
+export const ServiceProviderIcon: React.FC<ServiceProviderProps> = ({ providerKey }) => {
+  const provider = SERVICE_PROVIDERS[providerKey];
+
+  return provider ? (
+    <EuiIcon data-test-subj={`icon-service-provider-${providerKey}`} type={provider.icon} />
+  ) : null;
+};
+
+export const ServiceProviderName: React.FC<ServiceProviderProps> = ({
+  providerKey,
+  searchValue,
+}) => {
+  const provider = SERVICE_PROVIDERS[providerKey];
+
+  return provider ? (
+    <EuiHighlight search={searchValue ?? ''}>{provider.name}</EuiHighlight>
+  ) : (
+    <EuiHighlight search={searchValue ?? ''}>{providerKey}</EuiHighlight>
+  );
+};
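
A minimal sketch of how these exports could be consumed by another component. The `ProviderSummary` wrapper and the relative import paths are assumptions for illustration; only `SERVICE_PROVIDERS`, `ServiceProviderIcon`, `ServiceProviderName`, and `ServiceProviderKeys` come from this package.

```tsx
import React from 'react';
// Import paths are illustrative; adjust to wherever the package exposes these exports.
import { ServiceProviderKeys } from '../../constants';
import {
  SERVICE_PROVIDERS,
  ServiceProviderIcon,
  ServiceProviderName,
} from './render_service_provider/service_provider';

// Read-only summary for a single provider: icon, name, and supported solutions.
export const ProviderSummary: React.FC<{ providerKey: ServiceProviderKeys }> = ({
  providerKey,
}) => (
  <div>
    <ServiceProviderIcon providerKey={providerKey} />
    <ServiceProviderName providerKey={providerKey} searchValue="" />
    <small>{SERVICE_PROVIDERS[providerKey]?.solutions.join(', ')}</small>
  </div>
);
```
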
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.test.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.test.tsx
new file mode 100644
index 0000000000000..6e2bedbcf4516
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.test.tsx
@@ -0,0 +1,75 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+import { render, screen } from '@testing-library/react';
+import { FieldType } from '../../types/types';
+import { SelectableProvider } from './selectable';
+
+const providers = [
+ {
+ service: 'hugging_face',
+ name: 'Hugging Face',
+ task_types: ['text_embedding', 'sparse_embedding'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ url: {
+ default_value: 'https://api.openai.com/v1/embeddings',
+ description: 'The URL endpoint to use for the requests.',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+];
+
+describe('SelectableProvider', () => {
+ const props = {
+ providers,
+ onClosePopover: jest.fn(),
+ onProviderChange: jest.fn(),
+ };
+ describe('should render', () => {
+ describe('provider', () => {
+ afterAll(() => {
+ jest.clearAllMocks();
+ });
+
+ test('render placeholder', async () => {
+ render(<SelectableProvider {...props} />);
+ const searchInput = screen.getByTestId('provider-super-select-search-box');
+ expect(searchInput).toHaveAttribute('placeholder', 'Search');
+ });
+
+ test('render list of providers', async () => {
+ render(<SelectableProvider {...props} />);
+ const listOfProviders = screen.queryAllByRole('option');
+ expect(listOfProviders).toHaveLength(1);
+ });
+ });
+ });
+});
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.tsx
new file mode 100644
index 0000000000000..ab125ce273366
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/providers/selectable.tsx
@@ -0,0 +1,133 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ EuiBadge,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiSelectable,
+ EuiSelectableOption,
+ EuiSelectableProps,
+} from '@elastic/eui';
+import React, { useCallback, useMemo } from 'react';
+
+import {
+ ProviderSolution,
+ SERVICE_PROVIDERS,
+ ServiceProviderIcon,
+ ServiceProviderName,
+} from './render_service_provider/service_provider';
+import { ServiceProviderKeys } from '../../constants';
+import { InferenceProvider } from '../../types/types';
+import * as i18n from '../../translations';
+
+interface SelectableProviderProps {
+ providers: InferenceProvider[];
+ onClosePopover: () => void;
+ onProviderChange: (provider?: string) => void;
+}
+
+export const SelectableProvider: React.FC<SelectableProviderProps> = ({
+ providers,
+ onClosePopover,
+ onProviderChange,
+}) => {
+  const renderProviderOption = useCallback<NonNullable<EuiSelectableProps['renderOption']>>(
+ (option, searchValue) => {
+ const provider = Object.keys(SERVICE_PROVIDERS).includes(option.label)
+ ? SERVICE_PROVIDERS[option.label as ServiceProviderKeys]
+ : undefined;
+
+      const supportedBySolutions = (provider &&
+        provider.solutions.map((solution) => (
+          <EuiFlexItem key={solution} grow={false}>
+            <EuiBadge color="hollow">{solution}</EuiBadge>
+          </EuiFlexItem>
+        ))) ?? (
+        <EuiFlexItem grow={false}>
+          <EuiBadge color="hollow">{'Search' as ProviderSolution}</EuiBadge>
+        </EuiFlexItem>
+      );
+      return (
+        <EuiFlexGroup alignItems="center" gutterSize="s">
+          <EuiFlexItem grow={false}>
+            <ServiceProviderIcon providerKey={option.label as ServiceProviderKeys} />
+          </EuiFlexItem>
+          <EuiFlexItem grow={false}>
+            <ServiceProviderName
+              providerKey={option.label as ServiceProviderKeys}
+              searchValue={searchValue}
+            />
+          </EuiFlexItem>
+          <EuiFlexItem>
+            <EuiFlexGroup gutterSize="xs" justifyContent="flexEnd">
+              {supportedBySolutions}
+            </EuiFlexGroup>
+          </EuiFlexItem>
+        </EuiFlexGroup>
+      );
+ },
+ []
+ );
+
+  const EuiSelectableContent = useCallback<NonNullable<EuiSelectableProps['children']>>(
+    (list, search) => (
+      <>
+        {search}
+        {list}
+      </>
+ ),
+ []
+ );
+
+ const searchProps: EuiSelectableProps['searchProps'] = useMemo(
+ () => ({
+ 'data-test-subj': 'provider-super-select-search-box',
+ placeholder: i18n.SEARCHLABEL,
+ incremental: false,
+ compressed: true,
+ fullWidth: true,
+ }),
+ []
+ );
+
+  const handleProviderChange = useCallback<NonNullable<EuiSelectableProps['onChange']>>(
+ (options) => {
+ const selectedProvider = options.filter((option) => option.checked === 'on');
+ if (selectedProvider != null && selectedProvider.length > 0) {
+ onProviderChange(selectedProvider[0].label);
+ }
+ onClosePopover();
+ },
+ [onClosePopover, onProviderChange]
+ );
+
+ const getSelectableOptions = useCallback(() => {
+ return providers?.map((p) => ({
+ label: p.service,
+ key: p.service,
+ })) as EuiSelectableOption[];
+ }, [providers]);
+
+  return (
+    <EuiSelectable
+      searchable
+      singleSelection
+      options={getSelectableOptions()}
+      searchProps={searchProps}
+      renderOption={renderProviderOption}
+      onChange={handleProviderChange}
+    >
+      {EuiSelectableContent}
+    </EuiSelectable>
+  );
+};
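
A short usage sketch for `SelectableProvider`. The `ProviderPicker` wrapper, its props, and the open/close state handling are assumptions for illustration; only the `SelectableProvider` props and the `InferenceProvider` type come from this package.

```tsx
import React, { useState } from 'react';
import { SelectableProvider } from './selectable';
import type { InferenceProvider } from '../../types/types';

interface ProviderPickerProps {
  providers: InferenceProvider[];
  // Receives the selected service id, e.g. 'openai'.
  onChange: (service?: string) => void;
}

export const ProviderPicker: React.FC<ProviderPickerProps> = ({ providers, onChange }) => {
  const [isOpen, setIsOpen] = useState(true);

  return isOpen ? (
    <SelectableProvider
      providers={providers}
      onProviderChange={onChange}
      onClosePopover={() => setIsOpen(false)}
    />
  ) : null;
};
```
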
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/constants.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/constants.ts
new file mode 100644
index 0000000000000..1ea2152ba2f49
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/constants.ts
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+export enum ServiceProviderKeys {
+ amazonbedrock = 'amazonbedrock',
+ azureopenai = 'azureopenai',
+ azureaistudio = 'azureaistudio',
+ cohere = 'cohere',
+ elasticsearch = 'elasticsearch',
+ googleaistudio = 'googleaistudio',
+ googlevertexai = 'googlevertexai',
+ hugging_face = 'hugging_face',
+ mistral = 'mistral',
+ openai = 'openai',
+ anthropic = 'anthropic',
+ watsonxai = 'watsonxai',
+ 'alibabacloud-ai-search' = 'alibabacloud-ai-search',
+}
+
+export const DEFAULT_TASK_TYPE = 'completion';
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts
new file mode 100644
index 0000000000000..6258fc94687fe
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+export const getRequiredMessage = (field: string) => {
+ return i18n.translate('xpack.inferenceEndpointUICommon.components.requiredGenericTextField', {
+ defaultMessage: '{field} is required.',
+ values: { field },
+ });
+};
+
+export const INPUT_INVALID = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.params.error.invalidInputText',
+ {
+ defaultMessage: 'Input does not have a valid Array format.',
+ }
+);
+
+export const INVALID_ACTION = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.invalidActionText',
+ {
+ defaultMessage: 'Invalid action name.',
+ }
+);
+
+export const BODY = i18n.translate('xpack.inferenceEndpointUICommon.components.bodyFieldLabel', {
+ defaultMessage: 'Body',
+});
+
+export const INPUT = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.completionInputLabel',
+ {
+ defaultMessage: 'Input',
+ }
+);
+
+export const INPUT_TYPE = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.completionInputTypeLabel',
+ {
+ defaultMessage: 'Input type',
+ }
+);
+
+export const QUERY = i18n.translate('xpack.inferenceEndpointUICommon.components.rerankQueryLabel', {
+ defaultMessage: 'Query',
+});
+
+export const BODY_DESCRIPTION = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.bodyCodeEditorAriaLabel',
+ {
+ defaultMessage: 'Code editor',
+ }
+);
+
+export const TASK_TYPE = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.taskTypeFieldLabel',
+ {
+ defaultMessage: 'Task type',
+ }
+);
+
+export const PROVIDER = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.providerFieldLabel',
+ {
+ defaultMessage: 'Provider',
+ }
+);
+
+export const PROVIDER_REQUIRED = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.error.requiredProviderText',
+ {
+ defaultMessage: 'Provider is required.',
+ }
+);
+
+export const DOCUMENTATION = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.documentation',
+ {
+ defaultMessage: 'Inference API documentation',
+ }
+);
+
+export const SELECT_PROVIDER = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.selectProvider',
+ {
+ defaultMessage: 'Select a service',
+ }
+);
+
+export const COPY_TOOLTIP = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.copy.tooltip',
+ {
+ defaultMessage: 'Copy to clipboard',
+ }
+);
+
+export const COPIED_TOOLTIP = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.copied.tooltip',
+ {
+ defaultMessage: 'Copied!',
+ }
+);
+
+export const SEARCHLABEL = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.searchLabel',
+ {
+ defaultMessage: 'Search',
+ }
+);
+
+export const OPTIONALTEXT = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.optionalText',
+ {
+ defaultMessage: 'Optional',
+ }
+);
+
+export const RE_ENTER_SECRETS = (label: string) => {
+  return i18n.translate('xpack.inferenceEndpointUICommon.components.reEnterSecretsLabel', {
+    defaultMessage:
+      'You will need to re-enter your {label} each time you edit the inference endpoint',
+    values: { label },
+  });
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts
new file mode 100644
index 0000000000000..fc1f32b668811
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts
@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { FieldType } from '@kbn/search-connectors';
+
+export { FieldType } from '@kbn/search-connectors';
+
+export interface ConfigProperties {
+ default_value: string | number | boolean | null;
+ description: string | null;
+ label: string;
+ required: boolean;
+ sensitive: boolean;
+ updatable: boolean;
+ type: FieldType;
+}
+
+interface ConfigEntry extends ConfigProperties {
+ key: string;
+}
+
+export interface ConfigEntryView extends ConfigEntry {
+ isValid: boolean;
+ validationErrors: string[];
+ value: string | number | boolean | null;
+}
+
+export type FieldsConfiguration = Record<string, ConfigProperties>;
+
+export interface InferenceProvider {
+ service: string;
+ name: string;
+ task_types: string[];
+ logo?: string;
+ configurations: FieldsConfiguration;
+}
+
+export interface Config {
+ taskType: string;
+ taskTypeConfig?: Record<string, unknown>;
+ inferenceId: string;
+ provider: string;
+ providerConfig?: Record<string, unknown>;
+}
+
+export interface Secrets {
+ providerSecrets?: Record<string, unknown>;
+}
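
For illustration, a hand-built payload matching the `Config` and `Secrets` shapes above; the concrete ids, URLs, and field names below are assumptions, not defaults shipped by the package.

```ts
import type { Config, Secrets } from './types';

// Example payload shape only; the values are illustrative assumptions.
const config: Config = {
  inferenceId: 'openai-text_embedding-abc123',
  provider: 'openai',
  taskType: 'text_embedding',
  providerConfig: {
    model_id: 'text-embedding-3-small',
    url: 'https://api.openai.com/v1/embeddings',
  },
};

const secrets: Secrets = {
  providerSecrets: {
    api_key: '<redacted>',
  },
};

export const exampleEndpoint = { config, secrets };
```
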
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/helpers.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/helpers.ts
new file mode 100644
index 0000000000000..168d2fe37faa0
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/helpers.ts
@@ -0,0 +1,80 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { ValidationFunc } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import { isEmpty } from 'lodash/fp';
+import { Config, ConfigEntryView } from '../types/types';
+import * as LABELS from '../translations';
+
+export interface TaskTypeOption {
+ id: string;
+ value: string;
+ label: string;
+}
+
+export const getTaskTypeOptions = (taskTypes: string[]): TaskTypeOption[] =>
+ taskTypes.map((taskType) => ({
+ id: taskType,
+ label: taskType,
+ value: taskType,
+ }));
+
+export const generateInferenceEndpointId = (config: Config) => {
+ const taskTypeSuffix = config.taskType ? `${config.taskType}-` : '';
+ const inferenceEndpointId = `${config.provider}-${taskTypeSuffix}${Math.random()
+ .toString(36)
+ .slice(2)}`;
+ return inferenceEndpointId;
+};
+
+export const getNonEmptyValidator = (
+ schema: ConfigEntryView[],
+ validationEventHandler: (fieldsWithErrors: ConfigEntryView[]) => void,
+ isSubmitting: boolean = false,
+ isSecrets: boolean = false
+) => {
+ return (...args: Parameters<ValidationFunc>): ReturnType<ValidationFunc> => {
+ const [{ value, path }] = args;
+ const newSchema: ConfigEntryView[] = [];
+
+ const configData = (value ?? {}) as Record;
+ let hasErrors = false;
+ if (schema) {
+ schema
+ .filter((f: ConfigEntryView) => f.required)
+ .forEach((field: ConfigEntryView) => {
+ // validate if submitting or on field edit - value is not default to null
+ if (configData[field.key] !== null || isSubmitting) {
+ // validate secrets fields separately from regular
+ if (isSecrets ? field.sensitive : !field.sensitive) {
+ if (
+ !configData[field.key] ||
+ (typeof configData[field.key] === 'string' && isEmpty(configData[field.key]))
+ ) {
+ field.validationErrors = [LABELS.getRequiredMessage(field.label)];
+ field.isValid = false;
+ hasErrors = true;
+ } else {
+ field.validationErrors = [];
+ field.isValid = true;
+ }
+ }
+ }
+ newSchema.push(field);
+ });
+
+ validationEventHandler(newSchema);
+ if (hasErrors) {
+ return {
+ code: 'ERR_FIELD_MISSING',
+ path,
+ message: LABELS.getRequiredMessage('Action'),
+ };
+ }
+ }
+ };
+};
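
A short sketch of the two pure helpers above in use; the provider and task-type values are illustrative.

```ts
import { generateInferenceEndpointId, getTaskTypeOptions } from './helpers';
import type { Config } from '../types/types';

// Task-type options shown in the form for a provider that supports two task types.
export const taskTypeOptions = getTaskTypeOptions(['text_embedding', 'completion']);

// Derive a unique endpoint id from the chosen provider and task type;
// the result looks like 'cohere-text_embedding-<random suffix>'.
const draftConfig: Config = {
  provider: 'cohere',
  taskType: 'text_embedding',
  inferenceId: '',
};
export const inferenceId = generateInferenceEndpointId(draftConfig);
```
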
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/tsconfig.json b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/tsconfig.json
new file mode 100644
index 0000000000000..f306c4703b7a0
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/tsconfig.json
@@ -0,0 +1,26 @@
+{
+ "extends": "../../../../../tsconfig.base.json",
+ "compilerOptions": {
+ "outDir": "target/types",
+ "types": [
+ "jest",
+ "node",
+ "react",
+ "@kbn/ambient-ui-types"
+ ]
+ },
+ "include": [
+ "**/*.ts",
+ "**/*.tsx"
+ ],
+ "exclude": [
+ "target/**/*"
+ ],
+ "kbn_references": [
+ "@kbn/i18n",
+ "@kbn/i18n-react",
+ "@kbn/search-connectors",
+ "@kbn/es-ui-shared-plugin",
+ "@kbn/triggers-actions-ui-plugin"
+ ]
+}
diff --git a/x-pack/plugins/search_inference_endpoints/common/translations.ts b/x-pack/plugins/search_inference_endpoints/common/translations.ts
index 9d4238b494e6b..ed7fc1f0d80a1 100644
--- a/x-pack/plugins/search_inference_endpoints/common/translations.ts
+++ b/x-pack/plugins/search_inference_endpoints/common/translations.ts
@@ -40,6 +40,13 @@ export const API_DOCUMENTATION_LINK = i18n.translate(
}
);
+export const ADD_ENDPOINT_LABEL = i18n.translate(
+ 'xpack.searchInferenceEndpoints.addConnectorButtonLabel',
+ {
+ defaultMessage: 'Add endpoint',
+ }
+);
+
export const ERROR_TITLE = i18n.translate('xpack.searchInferenceEndpoints.inferenceId.errorTitle', {
defaultMessage: 'Error adding inference endpoint',
});
diff --git a/x-pack/plugins/search_inference_endpoints/common/types.ts b/x-pack/plugins/search_inference_endpoints/common/types.ts
index 2c23f542ee2bb..0c1da5a59b799 100644
--- a/x-pack/plugins/search_inference_endpoints/common/types.ts
+++ b/x-pack/plugins/search_inference_endpoints/common/types.ts
@@ -5,9 +5,12 @@
* 2.0.
*/
+import type { Config, Secrets } from '@kbn/inference-endpoint-ui-common';
+
export enum APIRoutes {
GET_INFERENCE_ENDPOINTS = '/internal/inference_endpoints/endpoints',
- DELETE_INFERENCE_ENDPOINT = '/internal/inference_endpoint/endpoints/{type}/{id}',
+ INFERENCE_ENDPOINT = '/internal/inference_endpoint/endpoints/{type}/{id}',
+ GET_INFERENCE_SERVICES = '/internal/inference_endpoints/_inference/_services',
}
export interface SearchInferenceEndpointsConfigType {
@@ -22,3 +25,10 @@ export enum TaskTypes {
sparse_embedding = 'sparse_embedding',
text_embedding = 'text_embedding',
}
+
+export type { InferenceProvider } from '@kbn/inference-endpoint-ui-common';
+
+export interface InferenceEndpoint {
+ config: Config;
+ secrets: Secrets;
+}
diff --git a/x-pack/plugins/search_inference_endpoints/kibana.jsonc b/x-pack/plugins/search_inference_endpoints/kibana.jsonc
index dca472a92d437..8f65c2be1b16a 100644
--- a/x-pack/plugins/search_inference_endpoints/kibana.jsonc
+++ b/x-pack/plugins/search_inference_endpoints/kibana.jsonc
@@ -29,7 +29,8 @@
"searchNavigation",
],
"requiredBundles": [
- "kibanaReact"
+ "kibanaReact",
+ "esUiShared"
]
}
}
diff --git a/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.test.tsx b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.test.tsx
new file mode 100644
index 0000000000000..2e9f6af8423a3
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.test.tsx
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { render, screen } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import { Form, useForm } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import React from 'react';
+import { I18nProvider } from '@kbn/i18n-react';
+
+import { AddInferenceFlyoutWrapper } from './add_inference_flyout_wrapper';
+import { mockProviders } from '../../utils/test_utils/test_utils';
+
+const mockAddEndpoint = jest.fn();
+const onClose = jest.fn();
+jest.mock('../../hooks/use_add_endpoint', () => ({
+ useAddEndpoint: () => ({
+ mutate: mockAddEndpoint.mockImplementation(() => Promise.resolve()),
+ }),
+}));
+
+jest.mock('../../hooks/use_providers', () => ({
+ useProviders: jest.fn(() => ({
+ data: mockProviders,
+ })),
+}));
+
+const MockFormProvider = ({ children }: { children: React.ReactElement }) => {
+ const { form } = useForm();
+  return (
+    <I18nProvider>
+      <Form form={form}>{children}</Form>
+    </I18nProvider>
+  );
+};
+
+describe('AddInferenceFlyout', () => {
+ it('renders', () => {
+    render(
+      <MockFormProvider>
+        <AddInferenceFlyoutWrapper onClose={onClose} />
+      </MockFormProvider>
+    );
+
+ expect(screen.getByTestId('create-inference-flyout')).toBeInTheDocument();
+ expect(screen.getByTestId('create-inference-flyout-header')).toBeInTheDocument();
+ expect(screen.getByTestId('create-inference-flyout-header')).toBeInTheDocument();
+ expect(screen.getByTestId('provider-select')).toBeInTheDocument();
+ expect(screen.getByTestId('add-inference-endpoint-submit-button')).toBeInTheDocument();
+ expect(screen.getByTestId('create-inference-flyout-close-button')).toBeInTheDocument();
+ });
+
+ it('invalidates form if no provider is selected', async () => {
+    render(
+      <MockFormProvider>
+        <AddInferenceFlyoutWrapper onClose={onClose} />
+      </MockFormProvider>
+    );
+
+ await userEvent.click(screen.getByTestId('add-inference-endpoint-submit-button'));
+ expect(screen.getByText('Provider is required.')).toBeInTheDocument();
+ expect(mockAddEndpoint).not.toHaveBeenCalled();
+ expect(screen.getByTestId('add-inference-endpoint-submit-button')).toBeDisabled();
+ });
+
+ it('valid submission', async () => {
+    render(
+      <MockFormProvider>
+        <AddInferenceFlyoutWrapper onClose={onClose} />
+      </MockFormProvider>
+    );
+
+ await userEvent.click(screen.getByTestId('provider-select'));
+ await userEvent.click(screen.getByText('Anthropic'));
+ await userEvent.type(await screen.findByTestId('api_key-password'), 'test api passcode');
+ await userEvent.type(
+ await screen.findByTestId('model_id-input'),
+ 'sample model name from Anthropic'
+ );
+
+ await userEvent.click(screen.getByTestId('add-inference-endpoint-submit-button'));
+ expect(mockAddEndpoint).toHaveBeenCalled();
+ }, 10e3);
+});
diff --git a/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.tsx b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.tsx
new file mode 100644
index 0000000000000..b2cb4f04645cd
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/add_inference_flyout_wrapper.tsx
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ EuiButtonEmpty,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiFlyout,
+ EuiFlyoutBody,
+ EuiFlyoutFooter,
+ EuiFlyoutHeader,
+ EuiTitle,
+ useGeneratedHtmlId,
+} from '@elastic/eui';
+import React from 'react';
+
+import { InferenceForm } from './inference_form';
+import * as i18n from './translations';
+
+interface AddInferenceFlyoutWrapperProps {
+ onClose: (state: boolean) => void;
+}
+
+export const AddInferenceFlyoutWrapper: React.FC<AddInferenceFlyoutWrapperProps> = ({
+ onClose,
+}) => {
+ const inferenceCreationFlyoutId = useGeneratedHtmlId({
+ prefix: 'addInferenceFlyoutId',
+ });
+ const closeFlyout = () => onClose(false);
+
+  return (
+    <EuiFlyout
+      onClose={closeFlyout}
+      aria-labelledby={inferenceCreationFlyoutId}
+      data-test-subj="create-inference-flyout"
+    >
+      <EuiFlyoutHeader hasBorder data-test-subj="create-inference-flyout-header">
+        <EuiTitle size="m">
+          <h2 id={inferenceCreationFlyoutId}>{i18n.CREATE_ENDPOINT_TITLE}</h2>
+        </EuiTitle>
+      </EuiFlyoutHeader>
+      <EuiFlyoutBody>
+        <InferenceForm onSubmitSuccess={onClose} />
+      </EuiFlyoutBody>
+      <EuiFlyoutFooter>
+        <EuiFlexGroup justifyContent="spaceBetween">
+          <EuiFlexItem grow={false}>
+            <EuiButtonEmpty
+              data-test-subj="create-inference-flyout-close-button"
+              onClick={closeFlyout}
+            >
+              {i18n.CANCEL}
+            </EuiButtonEmpty>
+          </EuiFlexItem>
+        </EuiFlexGroup>
+      </EuiFlyoutFooter>
+    </EuiFlyout>
+  );
+};
diff --git a/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/inference_form.tsx b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/inference_form.tsx
new file mode 100644
index 0000000000000..4ed1a1090edb3
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/inference_form.tsx
@@ -0,0 +1,69 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Form, useForm } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import React, { useCallback, useState } from 'react';
+import { InferenceServiceFormFields } from '@kbn/inference-endpoint-ui-common';
+import { EuiButton, EuiFlexGroup, EuiFlexItem, EuiSpacer } from '@elastic/eui';
+import { useProviders } from '../../hooks/use_providers';
+import * as i18n from './translations';
+import { useAddEndpoint } from '../../hooks/use_add_endpoint';
+import { InferenceEndpoint } from '../../types';
+
+interface InferenceFormProps {
+ onSubmitSuccess: (state: boolean) => void;
+}
+export const InferenceForm: React.FC<InferenceFormProps> = ({ onSubmitSuccess }) => {
+ const [isLoading, setIsLoading] = useState(false);
+ const onSuccess = useCallback(() => {
+ setIsLoading(false);
+ onSubmitSuccess(false);
+ }, [onSubmitSuccess]);
+ const onError = useCallback(() => {
+ setIsLoading(false);
+ }, []);
+ const { mutate: addEndpoint } = useAddEndpoint(
+ () => onSuccess(),
+ () => onError()
+ );
+ const { data: providers } = useProviders();
+ const { form } = useForm();
+ const handleSubmit = useCallback(async () => {
+ setIsLoading(true);
+ const { isValid, data } = await form.submit();
+
+ if (isValid) {
+ addEndpoint({
+ inferenceEndpoint: data as InferenceEndpoint,
+ });
+ } else {
+ setIsLoading(false);
+ }
+ }, [addEndpoint, form]);
+
+  return providers ? (
+    <Form form={form}>
+      <InferenceServiceFormFields providers={providers} />
+      <EuiSpacer size="m" />
+      <EuiFlexGroup justifyContent="flexEnd">
+        <EuiFlexItem grow={false}>
+          <EuiButton
+            fill
+            isLoading={isLoading}
+            data-test-subj="add-inference-endpoint-submit-button"
+            onClick={handleSubmit}
+          >
+            {i18n.SAVE}
+          </EuiButton>
+        </EuiFlexItem>
+      </EuiFlexGroup>
+    </Form>
+  ) : null;
+};
diff --git a/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/translations.ts b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/translations.ts
new file mode 100644
index 0000000000000..330a2b434d4da
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/public/components/add_inference_endpoints/translations.ts
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+export const SAVE = i18n.translate(
+ 'xpack.searchInferenceEndpoints.addInferenceEndpoint.saveBtnLabel',
+ {
+ defaultMessage: 'Save',
+ }
+);
+
+export const SAVE_TEST = i18n.translate(
+ 'xpack.searchInferenceEndpoints.addInferenceEndpoint.saveAndTestBtnLabel',
+ {
+ defaultMessage: 'Save and test',
+ }
+);
+
+export const CANCEL = i18n.translate(
+ 'xpack.searchInferenceEndpoints.addInferenceEndpoint.cancelBtnLabel',
+ {
+ defaultMessage: 'Cancel',
+ }
+);
+
+export const CREATE_ENDPOINT_TITLE = i18n.translate(
+ 'xpack.searchInferenceEndpoints.addInferenceEndpoint.createEndpointTitle',
+ {
+ defaultMessage: 'Create Inference Endpoint',
+ }
+);
diff --git a/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints.tsx b/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints.tsx
index c39bc69fc300b..d8d696fee1e9e 100644
--- a/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints.tsx
+++ b/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints.tsx
@@ -5,25 +5,30 @@
* 2.0.
*/
-import React from 'react';
+import React, { useState } from 'react';
import { EuiPageTemplate } from '@elastic/eui';
import { useQueryInferenceEndpoints } from '../hooks/use_inference_endpoints';
import { TabularPage } from './all_inference_endpoints/tabular_page';
import { InferenceEndpointsHeader } from './inference_endpoints_header';
+import { AddInferenceFlyoutWrapper } from './add_inference_endpoints/add_inference_flyout_wrapper';
export const InferenceEndpoints: React.FC = () => {
const { data } = useQueryInferenceEndpoints();
+ const [isAddInferenceFlyoutOpen, setIsAddInferenceFlyoutOpen] = useState(false);
const inferenceEndpoints = data || [];
return (
<>
-      <InferenceEndpointsHeader />
+      <InferenceEndpointsHeader setIsAddInferenceFlyoutOpen={setIsAddInferenceFlyoutOpen} />
+      {isAddInferenceFlyoutOpen && (
+        <AddInferenceFlyoutWrapper onClose={setIsAddInferenceFlyoutOpen} />
+      )}
     </>
);
};
diff --git a/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints_header.tsx b/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints_header.tsx
index acb7e82db13b2..ce94c4c89194b 100644
--- a/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints_header.tsx
+++ b/x-pack/plugins/search_inference_endpoints/public/components/inference_endpoints_header.tsx
@@ -5,13 +5,18 @@
* 2.0.
*/
-import { EuiPageTemplate, EuiButtonEmpty } from '@elastic/eui';
+import { EuiPageTemplate, EuiButtonEmpty, EuiButton } from '@elastic/eui';
import React from 'react';
import * as i18n from '../../common/translations';
import { docLinks } from '../../common/doc_links';
import { useTrainedModelPageUrl } from '../hooks/use_trained_model_page_url';
-export const InferenceEndpointsHeader: React.FC = () => {
+interface InferenceEndpointsHeaderProps {
+ setIsAddInferenceFlyoutOpen: (state: boolean) => void;
+}
+export const InferenceEndpointsHeader: React.FC<InferenceEndpointsHeaderProps> = ({
+ setIsAddInferenceFlyoutOpen,
+}) => {
const trainedModelPageUrl = useTrainedModelPageUrl();
return (
@@ -21,6 +26,15 @@ export const InferenceEndpointsHeader: React.FC = () => {
description={i18n.MANAGE_INFERENCE_ENDPOINTS_LABEL}
bottomBorder={true}
rightSideItems={[
+        <EuiButton onClick={() => setIsAddInferenceFlyoutOpen(true)}>
+          {i18n.ADD_ENDPOINT_LABEL}
+        </EuiButton>,
+const wrapper = ({ children }: { children: React.ReactNode }) => {
+  const queryClient = new QueryClient();
+  return <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>;
+};
+
+const mockConfig: any = {
+ provider: 'elasticsearch',
+ taskType: 'text_embedding',
+ inferenceId: 'es-endpoint-1',
+ providerConfig: {
+ num_allocations: 1,
+ num_threads: 2,
+ model_id: '.multilingual-e5-small',
+ },
+};
+const mockSecrets: any = { providerSecrets: {} };
+
+const mockInferenceEndpoint = {
+ config: mockConfig,
+ secrets: mockSecrets,
+};
+
+jest.mock('./use_kibana');
+
+const mockUseKibana = useKibana as jest.Mock;
+const mockAdd = jest.fn();
+const mockAddSuccess = jest.fn();
+const mockAddError = jest.fn();
+const mockOnSuccess = jest.fn();
+const mockOnError = jest.fn();
+
+describe('useAddEndpoint', () => {
+ beforeEach(() => {
+ mockUseKibana.mockReturnValue({
+ services: {
+ http: {
+ put: mockAdd,
+ },
+ notifications: {
+ toasts: {
+ addSuccess: mockAddSuccess,
+ addError: mockAddError,
+ },
+ },
+ },
+ });
+ });
+
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('should call add inference endpoint and show success toast', async () => {
+ const { result } = renderHook(() => useAddEndpoint(mockOnSuccess, mockOnError), { wrapper });
+
+ result.current.mutate({ inferenceEndpoint: mockInferenceEndpoint });
+
+ await waitFor(() =>
+ expect(mockAdd).toHaveBeenCalledWith(
+ '/internal/inference_endpoint/endpoints/text_embedding/es-endpoint-1',
+ {
+ body: JSON.stringify(mockInferenceEndpoint),
+ }
+ )
+ );
+ expect(mockAddSuccess).toHaveBeenCalledWith({
+ title: i18n.ENDPOINT_ADDED_SUCCESS,
+ });
+ expect(mockOnSuccess).toHaveBeenCalled();
+ expect(mockOnError).not.toHaveBeenCalled();
+ });
+
+ it('should show error toast on failure', async () => {
+ const error = { body: { message: 'error' } };
+ mockAdd.mockRejectedValue(error);
+ const { result } = renderHook(() => useAddEndpoint(mockOnSuccess, mockOnError), { wrapper });
+
+ result.current.mutate({ inferenceEndpoint: mockInferenceEndpoint });
+
+ await waitFor(() => {
+ expect(mockAddError).toHaveBeenCalled();
+ expect(mockOnSuccess).not.toHaveBeenCalled();
+ expect(mockOnError).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/x-pack/plugins/search_inference_endpoints/public/hooks/use_add_endpoint.ts b/x-pack/plugins/search_inference_endpoints/public/hooks/use_add_endpoint.ts
new file mode 100644
index 0000000000000..a0f64db518cca
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/public/hooks/use_add_endpoint.ts
@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { useMutation, useQueryClient } from '@tanstack/react-query';
+
+import { KibanaServerError } from '@kbn/kibana-utils-plugin/common';
+import { useKibana } from './use_kibana';
+import * as i18n from './translations';
+import { INFERENCE_ENDPOINTS_QUERY_KEY } from '../../common/constants';
+import { InferenceEndpoint } from '../types';
+
+interface MutationArgs {
+ inferenceEndpoint: InferenceEndpoint;
+}
+
+export const useAddEndpoint = (onSuccess?: () => void, onError?: () => void) => {
+ const queryClient = useQueryClient();
+ const { services } = useKibana();
+ const toasts = services.notifications?.toasts;
+
+ return useMutation(
+ async ({ inferenceEndpoint }: MutationArgs) => {
+ return await services.http.put<{}>(
+ `/internal/inference_endpoint/endpoints/${inferenceEndpoint.config.taskType}/${inferenceEndpoint.config.inferenceId}`,
+ {
+ body: JSON.stringify(inferenceEndpoint),
+ }
+ );
+ },
+ {
+ onSuccess: () => {
+ queryClient.invalidateQueries([INFERENCE_ENDPOINTS_QUERY_KEY]);
+ toasts?.addSuccess({
+ title: i18n.ENDPOINT_ADDED_SUCCESS,
+ });
+ if (onSuccess) {
+ onSuccess();
+ }
+ },
+ onError: (error: { body: KibanaServerError }) => {
+ toasts?.addError(new Error(error.body.message), {
+ title: i18n.ENDPOINT_CREATION_FAILED,
+ toastMessage: error.body.message,
+ });
+ if (onError) {
+ onError();
+ }
+ },
+ }
+ );
+};
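
A minimal sketch of calling `useAddEndpoint` from a component. The `SaveEndpointButton` wrapper, its props, and the empty callbacks are assumptions for illustration; the hook, its callbacks, and the `{ inferenceEndpoint }` mutation argument come from this file.

```tsx
import React from 'react';
import { EuiButton } from '@elastic/eui';
import { useAddEndpoint } from './use_add_endpoint';
import type { InferenceEndpoint } from '../types';

// Illustrative only: the endpoint object would normally come from the form state.
export const SaveEndpointButton: React.FC<{ endpoint: InferenceEndpoint }> = ({ endpoint }) => {
  const { mutate: addEndpoint, isLoading } = useAddEndpoint(
    () => {}, // onSuccess: e.g. close the flyout
    () => {} // onError: e.g. keep the flyout open
  );

  return (
    <EuiButton isLoading={isLoading} onClick={() => addEndpoint({ inferenceEndpoint: endpoint })}>
      {'Save'}
    </EuiButton>
  );
};
```
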
diff --git a/x-pack/plugins/search_inference_endpoints/public/hooks/use_providers.ts b/x-pack/plugins/search_inference_endpoints/public/hooks/use_providers.ts
new file mode 100644
index 0000000000000..4bef4268c798f
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/public/hooks/use_providers.ts
@@ -0,0 +1,644 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import type { HttpSetup } from '@kbn/core-http-browser';
+import { useQuery } from '@tanstack/react-query';
+import { FieldType, InferenceProvider } from '@kbn/inference-endpoint-ui-common';
+import { KibanaServerError } from '@kbn/kibana-utils-plugin/common';
+import { useKibana } from './use_kibana';
+import * as i18n from './translations';
+
+const getProviders = (http: HttpSetup): InferenceProvider[] => {
+ return [
+ {
+ service: 'alibabacloud-ai-search',
+ name: 'AlibabaCloud AI Search',
+ task_types: ['text_embedding', 'sparse_embedding', 'rerank', 'completion'],
+ configurations: {
+ workspace: {
+ default_value: null,
+ description: 'The name of the workspace used for the {infer} task.',
+ label: 'Workspace',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_key: {
+ default_value: null,
+ description: `A valid API key for the AlibabaCloud AI Search API.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ service_id: {
+ default_value: null,
+ description: 'The name of the model service to use for the {infer} task.',
+ label: 'Project ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ host: {
+ default_value: null,
+ description:
+ 'The name of the host address used for the {infer} task. You can find the host address at https://opensearch.console.aliyun.com/cn-shanghai/rag/api-key[ the API keys section] of the documentation.',
+ label: 'Host',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ http_schema: {
+ default_value: null,
+ description: '',
+ label: 'HTTP Schema',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'amazonbedrock',
+ name: 'Amazon Bedrock',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ secret_key: {
+ default_value: null,
+ description: 'A valid AWS secret key that is paired with the access_key.',
+ label: 'Secret Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ provider: {
+ default_value: null,
+ description: 'The model provider for your deployment.',
+ label: 'Provider',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ access_key: {
+ default_value: null,
+ description: 'A valid AWS access key that has permissions to use Amazon Bedrock.',
+ label: 'Access Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ model: {
+ default_value: null,
+ description:
+ 'The base model ID or an ARN to a custom model based on a foundational model.',
+ label: 'Model',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'By default, the amazonbedrock service sets the number of requests allowed per minute to 240.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ region: {
+ default_value: null,
+ description: 'The region that your model or ARN is deployed in.',
+ label: 'Region',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'anthropic',
+ name: 'Anthropic',
+ task_types: ['completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'By default, the anthropic service sets the number of requests allowed per minute to 50.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'azureaistudio',
+ name: 'Azure AI Studio',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ endpoint_type: {
+ default_value: null,
+ description: 'Specifies the type of endpoint that is used in your model deployment.',
+ label: 'Endpoint Type',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ provider: {
+ default_value: null,
+ description: 'The model provider for your deployment.',
+ label: 'Provider',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ target: {
+ default_value: null,
+ description: 'The target URL of your Azure AI Studio model deployment.',
+ label: 'Target',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'azureopenai',
+ name: 'Azure OpenAI',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ entra_id: {
+ default_value: null,
+ description: 'You must provide either an API key or an Entra ID.',
+ label: 'Entra ID',
+ required: false,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'The azureopenai service sets a default number of requests allowed per minute depending on the task type.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ deployment_id: {
+ default_value: null,
+ description: 'The deployment name of your deployed models.',
+ label: 'Deployment ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ resource_name: {
+ default_value: null,
+ description: 'The name of your Azure OpenAI resource.',
+ label: 'Resource Name',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_version: {
+ default_value: null,
+ description: 'The Azure API version ID to use.',
+ label: 'API Version',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'cohere',
+ name: 'Cohere',
+ task_types: ['text_embedding', 'rerank', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ },
+ },
+ {
+ service: 'elasticsearch',
+ name: 'Elasticsearch',
+ task_types: ['text_embedding', 'sparse_embedding', 'rerank'],
+ configurations: {
+ num_allocations: {
+ default_value: 1,
+ description:
+ 'The total number of allocations this model is assigned across machine learning nodes.',
+ label: 'Number Allocations',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ num_threads: {
+ default_value: 2,
+ description: 'Sets the number of threads used by each model allocation during inference.',
+ label: 'Number Threads',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: '.multilingual-e5-small',
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'googleaistudio',
+ name: 'Google AI Studio',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: "ID of the LLM you're using.",
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'googlevertexai',
+ name: 'Google Vertex AI',
+ task_types: ['text_embedding', 'rerank'],
+ configurations: {
+ service_account_json: {
+ default_value: null,
+ description: "API Key for the provider you're connecting to.",
+ label: 'Credentials JSON',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ project_id: {
+ default_value: null,
+ description:
+ 'The GCP Project ID which has Vertex AI API(s) enabled. For more information on the URL, refer to the {geminiVertexAIDocs}.',
+ label: 'GCP Project',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ location: {
+ default_value: null,
+ description:
+ 'Please provide the GCP region where the Vertex AI API(s) is enabled. For more information, refer to the {geminiVertexAIDocs}.',
+ label: 'GCP Region',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: `ID of the LLM you're using.`,
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'hugging_face',
+ name: 'Hugging Face',
+ task_types: ['text_embedding', 'sparse_embedding'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ url: {
+ default_value: 'https://api.openai.com/v1/embeddings',
+ description: 'The URL endpoint to use for the requests.',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'mistral',
+ name: 'Mistral',
+ task_types: ['text_embedding'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ model: {
+ default_value: null,
+ description:
+ 'Refer to the Mistral models documentation for the list of available text embedding models.',
+ label: 'Model',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ max_input_tokens: {
+ default_value: null,
+ description: 'Allows you to specify the maximum number of tokens per input.',
+ label: 'Maximum Input Tokens',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ },
+ },
+ {
+ service: 'openai',
+ name: 'OpenAI',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description:
+ 'The OpenAI API authentication key. For more details about generating OpenAI API keys, refer to the https://platform.openai.com/account/api-keys.',
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ organization_id: {
+ default_value: null,
+ description: 'The unique identifier of your organization.',
+ label: 'Organization ID',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'Default number of requests allowed per minute. For text_embedding is 3000. For completion is 500.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ url: {
+ default_value: 'https://api.openai.com/v1/chat/completions',
+ description:
+ 'The OpenAI API endpoint URL. For more information on the URL, refer to the https://platform.openai.com/docs/api-reference.',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'watsonxai',
+ name: 'IBM Watsonx',
+ task_types: ['text_embedding'],
+ configurations: {
+ project_id: {
+ default_value: null,
+ description: '',
+ label: 'Project ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_version: {
+ default_value: null,
+ description: 'The IBM Watsonx API version ID to use.',
+ label: 'API Version',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ max_input_tokens: {
+ default_value: null,
+ description: 'Allows you to specify the maximum number of tokens per input.',
+ label: 'Maximum Input Tokens',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ url: {
+ default_value: null,
+ description: '',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ ];
+};
+
+export const useProviders = () => {
+ const { services } = useKibana();
+ const toasts = services.notifications?.toasts;
+ const onErrorFn = (error: { body: KibanaServerError }) => {
+ toasts?.addError(new Error(error.body.message), {
+ title: i18n.GET_PROVIDERS_FAILED,
+ toastMessage: error.body.message,
+ });
+ };
+
+ const query = useQuery(['user-profile'], {
+ queryFn: () => getProviders(services.http),
+ staleTime: Infinity,
+ refetchOnWindowFocus: false,
+ onError: onErrorFn,
+ });
+ return query;
+};
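
A small sketch of consuming `useProviders`; the `ProviderList` component and the plain list markup are assumptions for illustration, only the hook and the `InferenceProvider` fields it returns come from this file.

```tsx
import React from 'react';
import { useProviders } from './use_providers';

// Renders a plain list of the provider display names returned by the hook.
export const ProviderList: React.FC = () => {
  const { data: providers, isLoading } = useProviders();

  if (isLoading || !providers) {
    return null;
  }

  return (
    <ul>
      {providers.map((provider) => (
        <li key={provider.service}>{provider.name}</li>
      ))}
    </ul>
  );
};
```
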
diff --git a/x-pack/plugins/search_inference_endpoints/public/utils/test_utils/test_utils.ts b/x-pack/plugins/search_inference_endpoints/public/utils/test_utils/test_utils.ts
new file mode 100644
index 0000000000000..993b6d620fb1d
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/public/utils/test_utils/test_utils.ts
@@ -0,0 +1,652 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { FieldType } from '@kbn/search-connectors/types';
+import { InferenceProvider } from '../../types';
+
+export const mockProviders: InferenceProvider[] = [
+ {
+ service: 'cohere',
+ name: 'Cohere',
+ task_types: ['text_embedding', 'rerank', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ },
+ },
+ {
+ service: 'elastic',
+ name: 'Elastic',
+ task_types: ['sparse_embedding'],
+ configurations: {
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ max_input_tokens: {
+ default_value: null,
+ description: 'Allows you to specify the maximum number of tokens per input.',
+ label: 'Maximum Input Tokens',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ },
+ },
+ {
+ service: 'watsonxai',
+ name: 'IBM Watsonx',
+ task_types: ['text_embedding'],
+ configurations: {
+ project_id: {
+ default_value: null,
+ description: '',
+ label: 'Project ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_version: {
+ default_value: null,
+ description: 'The IBM Watsonx API version ID to use.',
+ label: 'API Version',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ max_input_tokens: {
+ default_value: null,
+ description: 'Allows you to specify the maximum number of tokens per input.',
+ label: 'Maximum Input Tokens',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ url: {
+ default_value: null,
+ description: '',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'azureaistudio',
+ name: 'Azure AI Studio',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ endpoint_type: {
+ default_value: null,
+ description: 'Specifies the type of endpoint that is used in your model deployment.',
+ label: 'Endpoint Type',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ provider: {
+ default_value: null,
+ description: 'The model provider for your deployment.',
+ label: 'Provider',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ target: {
+ default_value: null,
+ description: 'The target URL of your Azure AI Studio model deployment.',
+ label: 'Target',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'hugging_face',
+ name: 'Hugging Face',
+ task_types: ['text_embedding', 'sparse_embedding'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ url: {
+ default_value: 'https://api.openai.com/v1/embeddings',
+ description: 'The URL endpoint to use for the requests.',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'amazonbedrock',
+ name: 'Amazon Bedrock',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ secret_key: {
+ default_value: null,
+ description: 'A valid AWS secret key that is paired with the access_key.',
+ label: 'Secret Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ provider: {
+ default_value: null,
+ description: 'The model provider for your deployment.',
+ label: 'Provider',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ access_key: {
+ default_value: null,
+ description: 'A valid AWS access key that has permissions to use Amazon Bedrock.',
+ label: 'Access Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ model: {
+ default_value: null,
+ description: 'The base model ID or an ARN to a custom model based on a foundational model.',
+ label: 'Model',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'By default, the amazonbedrock service sets the number of requests allowed per minute to 240.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ region: {
+ default_value: null,
+ description: 'The region that your model or ARN is deployed in.',
+ label: 'Region',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'anthropic',
+ name: 'Anthropic',
+ task_types: ['completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'By default, the anthropic service sets the number of requests allowed per minute to 50.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'googleaistudio',
+ name: 'Google AI Studio',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: "ID of the LLM you're using.",
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'elasticsearch',
+ name: 'Elasticsearch',
+ task_types: ['text_embedding', 'sparse_embedding', 'rerank'],
+ configurations: {
+ num_allocations: {
+ default_value: 1,
+ description:
+ 'The total number of allocations this model is assigned across machine learning nodes.',
+ label: 'Number Allocations',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ num_threads: {
+ default_value: 2,
+ description: 'Sets the number of threads used by each model allocation during inference.',
+ label: 'Number Threads',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: '.multilingual-e5-small',
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'openai',
+ name: 'OpenAI',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description:
+ 'The OpenAI API authentication key. For more details about generating OpenAI API keys, refer to the https://platform.openai.com/account/api-keys.',
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ organization_id: {
+ default_value: null,
+ description: 'The unique identifier of your organization.',
+ label: 'Organization ID',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'Default number of requests allowed per minute. For text_embedding is 3000. For completion is 500.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ url: {
+ default_value: 'https://api.openai.com/v1/chat/completions',
+ description:
+ 'The OpenAI API endpoint URL. For more information on the URL, refer to the https://platform.openai.com/docs/api-reference.',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'azureopenai',
+ name: 'Azure OpenAI',
+ task_types: ['text_embedding', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ entra_id: {
+ default_value: null,
+ description: 'You must provide either an API key or an Entra ID.',
+ label: 'Entra ID',
+ required: false,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'The azureopenai service sets a default number of requests allowed per minute depending on the task type.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ deployment_id: {
+ default_value: null,
+ description: 'The deployment name of your deployed models.',
+ label: 'Deployment ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ resource_name: {
+ default_value: null,
+ description: 'The name of your Azure OpenAI resource.',
+ label: 'Resource Name',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_version: {
+ default_value: null,
+ description: 'The Azure API version ID to use.',
+ label: 'API Version',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'mistral',
+ name: 'Mistral',
+ task_types: ['text_embedding'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ model: {
+ default_value: null,
+ description:
+ 'Refer to the Mistral models documentation for the list of available text embedding models.',
+ label: 'Model',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ max_input_tokens: {
+ default_value: null,
+ description: 'Allows you to specify the maximum number of tokens per input.',
+ label: 'Maximum Input Tokens',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ },
+ },
+ {
+ service: 'googlevertexai',
+ name: 'Google Vertex AI',
+ task_types: ['text_embedding', 'rerank'],
+ configurations: {
+ service_account_json: {
+ default_value: null,
+ description: "API Key for the provider you're connecting to.",
+ label: 'Credentials JSON',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ project_id: {
+ default_value: null,
+ description:
+ 'The GCP Project ID which has Vertex AI API(s) enabled. For more information on the URL, refer to the {geminiVertexAIDocs}.',
+ label: 'GCP Project',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ location: {
+ default_value: null,
+ description:
+ 'Please provide the GCP region where the Vertex AI API(s) is enabled. For more information, refer to the {geminiVertexAIDocs}.',
+ label: 'GCP Region',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ model_id: {
+ default_value: null,
+ description: `ID of the LLM you're using.`,
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+ {
+ service: 'alibabacloud-ai-search',
+ name: 'AlibabaCloud AI Search',
+ task_types: ['text_embedding', 'sparse_embedding', 'rerank', 'completion'],
+ configurations: {
+ workspace: {
+ default_value: null,
+ description: 'The name of the workspace used for the {infer} task.',
+ label: 'Workspace',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ api_key: {
+ default_value: null,
+ description: `A valid API key for the AlibabaCloud AI Search API.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ service_id: {
+ default_value: null,
+ description: 'The name of the model service to use for the {infer} task.',
+ label: 'Project ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ host: {
+ default_value: null,
+ description:
+ 'The name of the host address used for the {infer} task. You can find the host address at https://opensearch.console.aliyun.com/cn-shanghai/rag/api-key[ the API keys section] of the documentation.',
+ label: 'Host',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ },
+ http_schema: {
+ default_value: null,
+ description: '',
+ label: 'HTTP Schema',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ },
+ },
+ },
+];
diff --git a/x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.test.ts b/x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.test.ts
new file mode 100644
index 0000000000000..5f906aa1fb8cf
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.test.ts
@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { loggingSystemMock } from '@kbn/core/server/mocks';
+
+import { addInferenceEndpoint } from './add_inference_endpoint';
+
+describe('addInferenceEndpoint', () => {
+ const mockClient: any = {
+ inference: {
+ put: jest.fn(),
+ },
+ };
+
+ const type = 'text_embedding';
+ const id = 'es-endpoint-1';
+ const config: any = {
+ provider: 'elasticsearch',
+ taskType: 'text_embedding',
+ inferenceId: 'es-endpoint-1',
+ providerConfig: {
+ num_allocations: 1,
+ num_threads: 2,
+ model_id: '.multilingual-e5-small',
+ },
+ };
+ const secrets: any = { providerSecrets: {} };
+ const mockLogger = loggingSystemMock.createLogger();
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('should call the ES client with correct PUT request', async () => {
+ await addInferenceEndpoint(mockClient, type, id, config, secrets, mockLogger);
+
+ expect(mockClient.inference.put).toHaveBeenCalledWith({
+ inference_id: id,
+ task_type: type,
+ inference_config: {
+ service: 'elasticsearch',
+ service_settings: {
+ num_allocations: 1,
+ num_threads: 2,
+ model_id: '.multilingual-e5-small',
+ },
+ task_settings: {},
+ },
+ });
+ });
+});
diff --git a/x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.ts b/x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.ts
new file mode 100644
index 0000000000000..bbf634a672188
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/server/lib/add_inference_endpoint.ts
@@ -0,0 +1,47 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+import { ElasticsearchClient } from '@kbn/core/server';
+import type { Config, Secrets } from '@kbn/inference-endpoint-ui-common';
+import type { Logger } from '@kbn/logging';
+import { unflattenObject } from '../utils/unflatten_object';
+
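+/**
+ * Creates an inference endpoint through the Elasticsearch inference PUT API.
+ * Provider configuration and secrets are unflattened from dot notation and
+ * merged into `service_settings`.
+ */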
+export const addInferenceEndpoint = async (
+ esClient: ElasticsearchClient,
+ type: string,
+ id: string,
+ config: Config,
+ secrets: Secrets,
+ logger: Logger
+) => {
+ try {
+    /* The task_settings property is required in the API call,
+       but it is not needed for inference or connector creation.
+    */
+ const taskSettings = {};
+ const serviceSettings = {
+ ...unflattenObject(config?.providerConfig ?? {}),
+ ...unflattenObject(secrets?.providerSecrets ?? {}),
+ };
+
+ return await esClient.inference.put({
+ inference_id: config?.inferenceId ?? '',
+ task_type: config?.taskType as InferenceTaskType,
+ inference_config: {
+ service: config?.provider,
+ service_settings: serviceSettings,
+ task_settings: taskSettings,
+ },
+ });
+ } catch (e) {
+ logger.warn(
+ `Failed to create inference endpoint for task type "${config?.taskType}" and inference id ${config?.inferenceId}. Error: ${e.message}`
+ );
+ throw e;
+ }
+};
diff --git a/x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.test.ts b/x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.test.ts
new file mode 100644
index 0000000000000..05ba12ccb4cff
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.test.ts
@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { fetchInferenceServices } from './fetch_inference_services';
+import { ElasticsearchClient } from '@kbn/core/server';
+import { mockProviders } from '../../public/utils/test_utils/test_utils';
+
+describe('fetch inference services', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ const mockClient = {
+ asCurrentUser: {
+ transport: {
+ request: jest.fn(),
+ },
+ },
+ };
+ it('returns all inference services', async () => {
+ mockClient.asCurrentUser.transport.request.mockImplementationOnce(() => {
+ return Promise.resolve({ services: mockProviders });
+ });
+
+ const services = await fetchInferenceServices(
+ mockClient.asCurrentUser as unknown as ElasticsearchClient
+ );
+
+ expect(services).toEqual({
+ services: mockProviders,
+ });
+ });
+});
diff --git a/x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.ts b/x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.ts
new file mode 100644
index 0000000000000..d9d4d2e4c071d
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/server/lib/fetch_inference_services.ts
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { ElasticsearchClient } from '@kbn/core/server';
+import { InferenceProvider } from '../types';
+
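+/**
+ * Fetches the list of supported inference service providers, including their
+ * configuration schemas, from the `GET /_inference/_services` API.
+ */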
+export const fetchInferenceServices = async (
+ client: ElasticsearchClient
+): Promise<{
+ services: InferenceProvider[];
+}> => {
+ const { services } = await client.transport.request<{
+ services: InferenceProvider[];
+ }>({
+ method: 'GET',
+ path: `/_inference/_services`,
+ });
+
+ return {
+ services,
+ };
+};
diff --git a/x-pack/plugins/search_inference_endpoints/server/routes.ts b/x-pack/plugins/search_inference_endpoints/server/routes.ts
index 80d7a15ab99c4..018a3f28d72d4 100644
--- a/x-pack/plugins/search_inference_endpoints/server/routes.ts
+++ b/x-pack/plugins/search_inference_endpoints/server/routes.ts
@@ -9,9 +9,10 @@ import { IRouter } from '@kbn/core/server';
import { schema } from '@kbn/config-schema';
import type { Logger } from '@kbn/logging';
import { fetchInferenceEndpoints } from './lib/fetch_inference_endpoints';
-import { APIRoutes } from './types';
+import { APIRoutes, InferenceEndpoint } from './types';
import { errorHandler } from './utils/error_handler';
import { deleteInferenceEndpoint } from './lib/delete_inference_endpoint';
+import { addInferenceEndpoint } from './lib/add_inference_endpoint';
export function defineRoutes({ logger, router }: { logger: Logger; router: IRouter }) {
router.get(
@@ -35,9 +36,46 @@ export function defineRoutes({ logger, router }: { logger: Logger; router: IRout
})
);
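+  // Creates a new inference endpoint for the given task type and inference id.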
+ router.put(
+ {
+ path: APIRoutes.INFERENCE_ENDPOINT,
+ validate: {
+ params: schema.object({
+ type: schema.string(),
+ id: schema.string(),
+ }),
+ body: schema.object({
+ config: schema.object({
+ inferenceId: schema.string(),
+ provider: schema.string(),
+ taskType: schema.string(),
+ providerConfig: schema.any(),
+ }),
+ secrets: schema.object({
+ providerSecrets: schema.any(),
+ }),
+ }),
+ },
+ },
+ errorHandler(logger)(async (context, request, response) => {
+ const {
+ client: { asCurrentUser },
+ } = (await context.core).elasticsearch;
+
+ const { type, id } = request.params;
+ const { config, secrets }: InferenceEndpoint = request.body;
+ const result = await addInferenceEndpoint(asCurrentUser, type, id, config, secrets, logger);
+
+ return response.ok({
+ body: result,
+ headers: { 'content-type': 'application/json' },
+ });
+ })
+ );
+
router.delete(
{
- path: APIRoutes.DELETE_INFERENCE_ENDPOINT,
+ path: APIRoutes.INFERENCE_ENDPOINT,
validate: {
params: schema.object({
type: schema.string(),
diff --git a/x-pack/plugins/search_inference_endpoints/server/utils/unflatten_object.ts b/x-pack/plugins/search_inference_endpoints/server/utils/unflatten_object.ts
new file mode 100644
index 0000000000000..625f43bb4f8f6
--- /dev/null
+++ b/x-pack/plugins/search_inference_endpoints/server/utils/unflatten_object.ts
@@ -0,0 +1,17 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { set } from '@kbn/safer-lodash-set';
+
+interface GenericObject {
+ [key: string]: any;
+}
+
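+/**
+ * Expands an object with dot-notation keys (for example
+ * `rate_limit.requests_per_minute`) into a nested object structure.
+ */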
+export const unflattenObject = <T extends GenericObject = GenericObject>(object: object): T =>
+ Object.entries(object).reduce((acc, [key, value]) => {
+ set(acc, key, value);
+ return acc;
+ }, {} as T);
diff --git a/x-pack/plugins/search_inference_endpoints/tsconfig.json b/x-pack/plugins/search_inference_endpoints/tsconfig.json
index f448d36c7f463..da162552248b4 100644
--- a/x-pack/plugins/search_inference_endpoints/tsconfig.json
+++ b/x-pack/plugins/search_inference_endpoints/tsconfig.json
@@ -35,7 +35,12 @@
"@kbn/utility-types",
"@kbn/search-navigation",
"@kbn/shared-ux-page-kibana-template",
- "@kbn/licensing-plugin"
+ "@kbn/licensing-plugin",
+ "@kbn/inference-endpoint-ui-common",
+ "@kbn/es-ui-shared-plugin",
+ "@kbn/search-connectors",
+ "@kbn/core-http-browser",
+ "@kbn/safer-lodash-set"
],
"exclude": [
"target/**/*",
diff --git a/x-pack/test_serverless/functional/page_objects/svl_search_inference_management_page.ts b/x-pack/test_serverless/functional/page_objects/svl_search_inference_management_page.ts
index 4e4c6147e8f77..0cfc7a5447bb9 100644
--- a/x-pack/test_serverless/functional/page_objects/svl_search_inference_management_page.ts
+++ b/x-pack/test_serverless/functional/page_objects/svl_search_inference_management_page.ts
@@ -18,6 +18,7 @@ export function SvlSearchInferenceManagementPageProvider({ getService }: FtrProv
await testSubjects.existOrFail('allInferenceEndpointsPage');
await testSubjects.existOrFail('api-documentation');
await testSubjects.existOrFail('view-your-models');
+ await testSubjects.existOrFail('add-inference-endpoint-header-button');
},
async expectTabularViewToBeLoaded() {
@@ -95,5 +96,26 @@ export function SvlSearchInferenceManagementPageProvider({ getService }: FtrProv
expect((await browser.getClipboardValue()).includes('.elser-2-elasticsearch')).to.be(true);
},
},
+
+ AddInferenceFlyout: {
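+    // Opens the flyout, selects the Cohere provider and the completion task
+    // type, then verifies the generated endpoint name and submits the form.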
+ async expectInferenceEndpointToBeVisible() {
+ await testSubjects.click('add-inference-endpoint-header-button');
+ await testSubjects.existOrFail('create-inference-flyout');
+
+ await testSubjects.click('provider-select');
+ await testSubjects.setValue('provider-super-select-search-box', 'Cohere');
+ await testSubjects.click('provider');
+
+ await testSubjects.existOrFail('api_key-password');
+ await testSubjects.click('completion');
+ await testSubjects.existOrFail('inference-endpoint-input-field');
+      expect(
+        (await testSubjects.getVisibleText('inference-endpoint-input-field')).includes(
+          'cohere-completion'
+        )
+      ).to.be(true);
+
+ await testSubjects.click('add-inference-endpoint-submit-button');
+ expect(await testSubjects.isEnabled('add-inference-endpoint-submit-button')).to.be(false);
+ },
+ },
};
}
diff --git a/x-pack/test_serverless/functional/test_suites/search/inference_management.ts b/x-pack/test_serverless/functional/test_suites/search/inference_management.ts
index a931324b390b7..939deb7445213 100644
--- a/x-pack/test_serverless/functional/test_suites/search/inference_management.ts
+++ b/x-pack/test_serverless/functional/test_suites/search/inference_management.ts
@@ -98,6 +98,12 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) {
});
});
+ describe('create inference flyout', () => {
+ it('renders successfully', async () => {
+ await pageObjects.svlSearchInferenceManagementPage.AddInferenceFlyout.expectInferenceEndpointToBeVisible();
+ });
+ });
+
it('has embedded dev console', async () => {
await testHasEmbeddedConsole(pageObjects);
});
diff --git a/yarn.lock b/yarn.lock
index b19d415fb949b..32e6ff0a52b92 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5941,6 +5941,10 @@
version "0.0.0"
uid ""
+"@kbn/inference-endpoint-ui-common@link:x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common":
+ version "0.0.0"
+ uid ""
+
"@kbn/inference-plugin@link:x-pack/platform/plugins/shared/inference":
version "0.0.0"
uid ""