From ff8126712120b55d01899da1d1049b829acf3a72 Mon Sep 17 00:00:00 2001 From: NotBioWaste905 Date: Wed, 13 Nov 2024 17:27:18 +0300 Subject: [PATCH] Moved docstring, removed pipeline parameter --- chatsky/conditions/llm.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/chatsky/conditions/llm.py b/chatsky/conditions/llm.py index 90bcb4f46..baae30ea6 100644 --- a/chatsky/conditions/llm.py +++ b/chatsky/conditions/llm.py @@ -3,18 +3,17 @@ class LLMCondition(BaseCondition): + """ + Basic function for using LLM in condition cases. + + :param model_name: Key of the model from the `Pipeline.models` dictionary. + :param prompt: Prompt for the model to use on user's input. + :param method: Method that takes model's output and returns boolean. + """ model_name: str prompt: str method: BaseMethod - pipeline: Pipeline async def call(self, ctx: Context) -> bool: - """ - Basic function for using LLM in condition cases. - - :param model_name: Key of the model from the `Pipeline.models` dictionary. - :param prompt: Prompt for the model to use on users input. - :param method: Method that takes models output and returns boolean. - """ - model = self.pipeline.models[self.model_name] + model = ctx.pipeline.models[self.model_name] return await model.condition(self.prompt, self.method)