From 60bea320e95609e0a127eba5d0da3ac788359ed6 Mon Sep 17 00:00:00 2001 From: garyzhang99 Date: Fri, 10 May 2024 16:14:08 +0800 Subject: [PATCH] remove api_key and api_key_name params, and refer to docs --- README.md | 2 +- README_ZH.md | 2 +- .../en/source/tutorial/203-model.md | 4 +- .../zh_CN/source/tutorial/203-model.md | 4 +- .../litellm_chat_template.json | 8 +--- src/agentscope/models/litellm_model.py | 39 +++++++++++++------ 6 files changed, 33 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index 465a16a72..c450685d8 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ services and third-party model APIs. | ollama | Chat | [`OllamaChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#ollama-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/ollama_chat_template.json) | llama3, llama2, Mistral, ... | | | Embedding | [`OllamaEmbeddingWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#ollama-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/ollama_embedding_template.json) | llama2, Mistral, ... | | | Generation | [`OllamaGenerationWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#ollama-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/ollama_generate_template.json) | llama2, Mistral, ... | -| LiteLLM API | Chat | [`LiteLLMChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#litellm-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/litellm_chat_template.json) | ... | +| LiteLLM API | Chat | [`LiteLLMChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#litellm-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/litellm_chat_template.json) | [models supported by litellm](https://docs.litellm.ai/docs/)... | | Post Request based API | - | [`PostAPIModelWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#post-request-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/postapi_model_config_template.json) | - | **Supported Local Model Deployment** diff --git a/README_ZH.md b/README_ZH.md index 64f916515..47ad48b9e 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -76,7 +76,7 @@ AgentScope提供了一系列`ModelWrapper`来支持本地模型服务和第三 | ollama | Chat | [`OllamaChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#ollama-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/ollama_chat_template.json) | llama3, llama2, Mistral, ... | | | Embedding | [`OllamaEmbeddingWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#ollama-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/ollama_embedding_template.json) | llama2, Mistral, ... | | | Generation | [`OllamaGenerationWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#ollama-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/ollama_generate_template.json) | llama2, Mistral, ... | -| LiteLLM API | Chat | [`LiteLLMChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#litellm-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/litellm_chat_template.json) | ... | +| LiteLLM API | Chat | [`LiteLLMChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#litellm-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/litellm_chat_template.json) | [models supported by litellm](https://docs.litellm.ai/docs/)... | | Post Request based API | - | [`PostAPIModelWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | [guidance](https://modelscope.github.io/agentscope/en/tutorial/203-model.html#post-request-api)
[template](https://github.com/modelscope/agentscope/blob/main/examples/model_configs_template/postapi_model_config_template.json) | - | **支持的本地模型部署** diff --git a/docs/sphinx_doc/en/source/tutorial/203-model.md b/docs/sphinx_doc/en/source/tutorial/203-model.md index 8d7dac9a9..d6e153d0f 100644 --- a/docs/sphinx_doc/en/source/tutorial/203-model.md +++ b/docs/sphinx_doc/en/source/tutorial/203-model.md @@ -453,9 +453,7 @@ com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py">agen { "config_name": "lite_llm_openai_chat_gpt-3.5-turbo", "model_type": "litellm_chat", - "model_name": "gpt-3.5-turbo", - "api_key": "{your_api_key}", - "api_key_name": "OPENAI_API_KEY" + "model_name": "gpt-3.5-turbo" # You should note that for different models, you should set the corresponding environment variables, such as OPENAI_API_KEY, etc. You may refer to https://docs.litellm.ai/docs/ for this. }, ``` diff --git a/docs/sphinx_doc/zh_CN/source/tutorial/203-model.md b/docs/sphinx_doc/zh_CN/source/tutorial/203-model.md index c441ea8c0..7b912cbf2 100644 --- a/docs/sphinx_doc/zh_CN/source/tutorial/203-model.md +++ b/docs/sphinx_doc/zh_CN/source/tutorial/203-model.md @@ -448,9 +448,7 @@ com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py">agen { "config_name": "lite_llm_openai_chat_gpt-3.5-turbo", "model_type": "litellm_chat", - "model_name": "gpt-3.5-turbo", - "api_key": "{your_api_key}", - "api_key_name": "OPENAI_API_KEY" + "model_name": "gpt-3.5-turbo" # You should note that for different models, you should set the corresponding environment variables, such as OPENAI_API_KEY, etc. You may refer to https://docs.litellm.ai/docs/ for this. 
}, ``` diff --git a/examples/model_configs_template/litellm_chat_template.json b/examples/model_configs_template/litellm_chat_template.json index 57a856acd..f1711dca9 100644 --- a/examples/model_configs_template/litellm_chat_template.json +++ b/examples/model_configs_template/litellm_chat_template.json @@ -1,15 +1,11 @@ [{ "config_name": "lite_llm_openai_chat_gpt-3.5-turbo", "model_type": "litellm_chat", - "model_name": "gpt-3.5-turbo", - "api_key": "{your_api_key}", - "api_key_name": "OPENAI_API_KEY" + "model_name": "gpt-3.5-turbo" }, { "config_name": "lite_llm_claude3", "model_type": "litellm_chat", - "model_name": "claude-3-opus-20240229", - "api_key": "{your_api_key}", - "api_key_name": "ANTHROPIC_API_KEY" + "model_name": "claude-3-opus-20240229" } ] diff --git a/src/agentscope/models/litellm_model.py b/src/agentscope/models/litellm_model.py index 8a1417c80..e0d8096a3 100644 --- a/src/agentscope/models/litellm_model.py +++ b/src/agentscope/models/litellm_model.py @@ -2,7 +2,6 @@ """Model wrapper based on litellm https://docs.litellm.ai/docs/""" from abc import ABC from typing import Union, Any, List, Sequence -import os from loguru import logger @@ -23,24 +22,32 @@ def __init__( self, config_name: str, model_name: str = None, - api_key: str = None, - api_key_name: str = None, generate_args: dict = None, **kwargs: Any, ) -> None: """ + To use the LiteLLM wrapper, environment variables must be set. + Different model_name values may require different environment variables. + For example: + - for model_name: "gpt-3.5-turbo", you need to set "OPENAI_API_KEY" + ``` + os.environ["OPENAI_API_KEY"] = "your-api-key" + ``` + - for model_name: "claude-2", you need to set "ANTHROPIC_API_KEY" + - for Azure OpenAI, you need to set "AZURE_API_KEY", + "AZURE_API_BASE", "AZURE_API_VERSION" + You should refer to the docs in https://docs.litellm.ai/docs/ . Args: config_name (`str`): The name of the model config. 
model_name (`str`, default `None`): The name of the model to use in OpenAI API. - api_key (`str`, default `None`): - The API key used. - api_key_name (`str`, default `None`): - The API key name used, related to the model_name. generate_args (`dict`, default `None`): The extra keyword arguments used in litellm api generation, e.g. `temperature`, `seed`. + For generate_args, please refer to + https://docs.litellm.ai/docs/completion/input + for more details. """ @@ -57,10 +64,6 @@ def __init__( self.model_name = model_name self.generate_args = generate_args or {} - self.api_key = api_key - self.api_key_name = api_key_name - if api_key is not None and api_key_name is not None: - os.environ[api_key_name] = api_key self._register_default_metrics() def format( @@ -75,7 +78,19 @@ def format( class LiteLLMChatWrapper(LiteLLMWrapperBase): - """The model wrapper based on litellm chat API.""" + """The model wrapper based on litellm chat API. + To use the LiteLLM wrapper, environment variables must be set. + Different model_name values may require different environment variables. + For example: + - for model_name: "gpt-3.5-turbo", you need to set "OPENAI_API_KEY" + ``` + os.environ["OPENAI_API_KEY"] = "your-api-key" + ``` + - for model_name: "claude-2", you need to set "ANTHROPIC_API_KEY" + - for Azure OpenAI, you need to set "AZURE_API_KEY", + "AZURE_API_BASE", "AZURE_API_VERSION" + You should refer to the docs in https://docs.litellm.ai/docs/ . + """ model_type: str = "litellm_chat"