feat: add model for 01.ai, yi-chat-34b series (#2865)
soulteary authored Mar 17, 2024
1 parent a8e694c commit f770232
Showing 13 changed files with 238 additions and 0 deletions.
1 change: 1 addition & 0 deletions api/core/model_runtime/model_providers/_position.yaml
@@ -20,6 +20,7 @@
- jina
- chatglm
- xinference
- yi
- openllm
- localai
- openai_api_compatible
Empty file.
20 changes: 20 additions & 0 deletions api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg
20 changes: 20 additions & 0 deletions api/core/model_runtime/model_providers/yi/_assets/icon_l_zh.svg
Empty file.
3 changes: 3 additions & 0 deletions api/core/model_runtime/model_providers/yi/llm/_position.yaml
@@ -0,0 +1,3 @@
- yi-34b-chat-0205
- yi-34b-chat-200k
- yi-vl-plus
30 changes: 30 additions & 0 deletions api/core/model_runtime/model_providers/yi/llm/llm.py
@@ -0,0 +1,30 @@
from collections.abc import Generator
from typing import Optional, Union

from core.model_runtime.entities.llm_entities import LLMResult
from core.model_runtime.entities.message_entities import (
    PromptMessage,
    PromptMessageTool,
)
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel


class YiLargeLanguageModel(OAIAPICompatLargeLanguageModel):
    def _invoke(self, model: str, credentials: dict,
                prompt_messages: list[PromptMessage], model_parameters: dict,
                tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,
                stream: bool = True, user: Optional[str] = None) \
            -> Union[LLMResult, Generator]:
        # Fill in Yi-specific defaults before delegating to the OpenAI-compatible base class.
        self._add_custom_parameters(credentials)
        return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user)

    def validate_credentials(self, model: str, credentials: dict) -> None:
        self._add_custom_parameters(credentials)
        super().validate_credentials(model, credentials)

    @staticmethod
    def _add_custom_parameters(credentials: dict) -> None:
        # Yi models are chat models; default to the official endpoint when none is configured.
        credentials['mode'] = 'chat'

        if 'endpoint_url' not in credentials or credentials['endpoint_url'] == "":
            credentials['endpoint_url'] = 'https://api.lingyiwanwu.com/v1'
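The snippet below is a minimal, self-contained sketch (no Dify imports) of the credential defaulting that _add_custom_parameters performs above; the dictionary shape follows the api_key / endpoint_url fields declared in yi.yaml, and the key value is a placeholder.

def add_custom_parameters(credentials: dict) -> None:
    # Yi only exposes chat models; an empty or missing endpoint_url falls back
    # to the official 01.AI OpenAI-compatible endpoint.
    credentials['mode'] = 'chat'
    if 'endpoint_url' not in credentials or credentials['endpoint_url'] == "":
        credentials['endpoint_url'] = 'https://api.lingyiwanwu.com/v1'

creds = {'api_key': 'sk-placeholder'}   # hypothetical key; endpoint_url intentionally omitted
add_custom_parameters(creds)
print(creds['endpoint_url'])            # -> https://api.lingyiwanwu.com/v1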
28 changes: 28 additions & 0 deletions api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml
@@ -0,0 +1,28 @@
model: yi-34b-chat-0205
label:
  zh_Hans: yi-34b-chat-0205
  en_US: yi-34b-chat-0205
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 4096
parameter_rules:
  - name: max_tokens
    use_template: max_tokens
    type: int
    default: 512
    min: 1
    max: 4096
  - name: temperature
    use_template: temperature
    type: float
    default: 0.7
    min: 0
    max: 2
pricing:
  input: '0.0025'
  output: '0.0025'
  unit: '0.00001'
  currency: RMB
28 changes: 28 additions & 0 deletions api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml
@@ -0,0 +1,28 @@
model: yi-34b-chat-200k
label:
  zh_Hans: yi-34b-chat-200k
  en_US: yi-34b-chat-200k
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 200000
parameter_rules:
  - name: max_tokens
    use_template: max_tokens
    type: int
    default: 1024
    min: 1
    max: 200000
  - name: temperature
    use_template: temperature
    type: float
    default: 0.7
    min: 0
    max: 2
pricing:
  input: '0.012'
  output: '0.012'
  unit: '0.00001'
  currency: RMB
28 changes: 28 additions & 0 deletions api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml
@@ -0,0 +1,28 @@
model: yi-vl-plus
label:
  zh_Hans: yi-vl-plus
  en_US: yi-vl-plus
model_type: llm
features:
  - vision
model_properties:
  mode: chat
  context_size: 4096
parameter_rules:
  - name: max_tokens
    use_template: max_tokens
    type: int
    default: 512
    min: 1
    max: 4096
  - name: temperature
    use_template: temperature
    type: float
    default: 0.7
    min: 0
    max: 2
pricing:
  input: '0.01'
  output: '0.03'
  unit: '0.001'
  currency: USD
32 changes: 32 additions & 0 deletions api/core/model_runtime/model_providers/yi/yi.py
@@ -0,0 +1,32 @@
import logging

from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.model_provider import ModelProvider

logger = logging.getLogger(__name__)


class YiProvider(ModelProvider):

    def validate_provider_credentials(self, credentials: dict) -> None:
        """
        Validate provider credentials.
        If validation fails, raise an exception.

        :param credentials: provider credentials, following the form defined in `provider_credential_schema`.
        """
        try:
            model_instance = self.get_model_instance(ModelType.LLM)

            # Validate with the `yi-34b-chat-0205` model, regardless of which
            # model the credentials will eventually be used with.
            model_instance.validate_credentials(
                model='yi-34b-chat-0205',
                credentials=credentials
            )
        except CredentialsValidateFailedError as ex:
            raise ex
        except Exception as ex:
            logger.exception(f'{self.get_provider_schema().provider} credentials validate failed')
            raise ex
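For reference, the following is a self-contained sketch (standard library only) of the error-handling pattern used in validate_provider_credentials above: credential-validation failures pass through unchanged, while any other exception is logged and re-raised. The CredentialsValidateFailedError class here is only a stand-in for the one imported from core.model_runtime.errors.validate.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class CredentialsValidateFailedError(Exception):
    """Stand-in for core.model_runtime.errors.validate.CredentialsValidateFailedError."""


def validate(check) -> None:
    try:
        check()
    except CredentialsValidateFailedError:
        raise                                   # surfaced to the caller as-is
    except Exception:
        logger.exception('yi credentials validate failed')
        raise


validate(lambda: None)                          # valid credentials: returns silently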
41 changes: 41 additions & 0 deletions api/core/model_runtime/model_providers/yi/yi.yaml
@@ -0,0 +1,41 @@
provider: yi
label:
  en_US: 01.AI
  zh_Hans: 零一万物
description:
  en_US: Models provided by 01.AI, such as yi-34b-chat and yi-vl-plus.
  zh_Hans: 零一万物提供的模型,例如 yi-34b-chat 和 yi-vl-plus。
icon_small:
  en_US: icon_s_en.svg
icon_large:
  en_US: icon_l_en.svg
background: "#EFFDFD"
help:
  title:
    en_US: Get your API Key from 01.ai
    zh_Hans: 从零一万物获取 API Key
  url:
    en_US: https://platform.lingyiwanwu.com/apikeys
supported_model_types:
  - llm
configurate_methods:
  - predefined-model
provider_credential_schema:
  credential_form_schemas:
    - variable: api_key
      label:
        en_US: API Key
      type: secret-input
      required: true
      placeholder:
        zh_Hans: 在此输入您的 API Key
        en_US: Enter your API Key
    - variable: endpoint_url
      label:
        zh_Hans: 自定义 API endpoint 地址
        en_US: Custom API endpoint URL
      type: text-input
      required: false
      placeholder:
        zh_Hans: Base URL, e.g. https://api.lingyiwanwu.com/v1
        en_US: Base URL, e.g. https://api.lingyiwanwu.com/v1
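As an illustration only, the credential form defined above would produce a payload like the following Python dict, which is what validate_provider_credentials() in yi.py receives; the api_key value is a placeholder, and endpoint_url may be omitted, in which case llm.py fills in the default.

credentials = {
    'api_key': 'your-01ai-api-key',                     # placeholder value
    'endpoint_url': 'https://api.lingyiwanwu.com/v1',   # optional custom endpoint
}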
