Skip to content

Commit

Permalink
Added missing docstrings for LLM_API
Browse files Browse the repository at this point in the history
  • Loading branch information
NotBioWaste905 committed Dec 9, 2024
1 parent 1c4aa24 commit 419ab8d
Showing 1 changed file with 20 additions and 2 deletions.
22 changes: 20 additions & 2 deletions chatsky/llm/llm_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@ def __init__(
system_prompt: Optional[str] = "",
) -> None:
"""
:param model: Model object.
:param system_prompt: System prompt for the model.
:param model: Model object
:param system_prompt: System prompt for the model
"""
check_langchain_available()
self.model: BaseChatModel = model
Expand All @@ -37,6 +37,15 @@ async def respond(
history: list[BaseMessage],
message_schema: Union[None, Type[Message], Type[BaseModel]] = None,
) -> Message:
"""
Process and structure the model's response based on the provided schema.
:param history: List of previous messages in the conversation
:param message_schema: Schema for structuring the output, defaults to None
:return: Processed model response
:raises ValueError: If message_schema is not None, Message, or BaseModel
"""

if message_schema is None:
result = await self.parser.ainvoke(await self.model.ainvoke(history))
Expand All @@ -58,5 +67,14 @@ async def respond(
async def condition(
    self, history: list[BaseMessage], method: BaseMethod, return_schema: Optional[BaseModel] = None
) -> bool:
    """
    Evaluate ``method`` as a condition over the conversation history.

    The model first produces a generation for the given history (with
    log-probabilities attached, top 10 alternatives per token); ``method``
    is then awaited with both the history and that generation to decide
    the condition.

    :param history: List of previous messages in the conversation
    :param method: Method to evaluate the condition
    :param return_schema: Optional schema for structuring the output
    :return: Boolean result of the condition evaluation
    """
    # NOTE(review): return_schema is not referenced in this body — confirm
    # whether it is consumed elsewhere or reserved for future use.
    generation = await self.model.agenerate([history], logprobs=True, top_logprobs=10)
    return await method(history, generation)

0 comments on commit 419ab8d

Please sign in to comment.