Commit
chore(llm): add docstring for the base and aws llm controllers
kkiani committed Oct 7, 2024
1 parent 9573867 commit 1a4baa5
Showing 2 changed files with 50 additions and 2 deletions.
29 changes: 27 additions & 2 deletions src/damavand/base/controllers/llm.py
@@ -11,7 +11,32 @@


class LlmController(ApplicationController):
""" """
"""
Base class for LLM Controllers. This class provides the basic functionality for interacting with LLM APIs. The LLM APIs are following the OpenAI Chat Completions API model. For more information, see the [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat/create).
LLM Controllers are using vLLM as backend for hardware optimization and serving open source models. For available list of models, see the [vLLM documentation](https://docs.vllm.ai/en/latest/models/supported_models.html).
Parameters
----------
name : str
The name of the controller.
model : Optional[str]
The model name or ID.
tags : dict[str, str]
Methods
-------
model_id
Return the model name/ID.
base_url
Return the base URL for the LLM API.
default_api_key
Return the default API key.
chat_completions_url
Return the chat completions URL.
client
Return an OpenAI client as an standared interface for interacting with deployed LLM APIs.
"""

def __init__(
self,
@@ -57,7 +82,7 @@ def chat_completions_url(self) -> str:
@runtime
@cache
def client(self) -> "openai.OpenAI": # type: ignore # noqa
"""Return an OpenAI client."""
"""Return an OpenAI client as an standared interface for interacting with deployed LLM APIs."""

try:
import openai # type: ignore # noqa
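For context, a minimal usage sketch of the OpenAI-compatible interface the new docstring describes. It assumes a concrete LlmController instance is already available; the helper function and prompt are hypothetical and not part of this commit.

# Hypothetical usage sketch (not part of this commit): the `client` property
# is documented to return an openai.OpenAI object, so the standard Chat
# Completions call shape applies to any concrete LlmController instance.
def ask(controller, prompt: str) -> str:
    response = controller.client.chat.completions.create(
        model=controller.model_id,  # documented as the model name/ID
        messages=[{"role": "user", "content": prompt}],
    )
    return response.choices[0].message.content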
23 changes: 23 additions & 0 deletions src/damavand/cloud/aws/controllers/llm.py
@@ -16,6 +16,29 @@


class AwsLlmController(LlmController):
"""
AWS implementation of the LLM Controller. You can check LlmController for more information.
Parameters
----------
name : str
The name of the controller.
region : str
The AWS region.
model : Optional[str]
The model name or ID.
tags : dict[str, str]
Methods
-------
base_url
Return the base URL for the LLM API.
default_api_key
Return the default API key.
resource
Return the Pulumi IaC AwsVllmComponent object.
"""

def __init__(
self,
name,
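A hedged instantiation sketch based on the Parameters section above. The import path follows the file location in this diff (assuming a src layout), while the model name and tag values are illustrative assumptions, not defaults from the codebase.

# Hypothetical sketch: argument names follow the documented Parameters;
# the model and tag values are examples only.
from damavand.cloud.aws.controllers.llm import AwsLlmController

llm = AwsLlmController(
    name="my-llm",
    region="us-east-1",
    model="microsoft/Phi-3-mini-4k-instruct",  # any vLLM-supported model
    tags={"project": "damavand"},
)

# `base_url` and `default_api_key` configure the OpenAI-compatible client;
# `resource` exposes the underlying Pulumi AwsVllmComponent for IaC use
# (these typically require a deployed endpoint / Pulumi runtime context).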
