From ccb0697bb579071581e73221c90ba1b894f9e9b4 Mon Sep 17 00:00:00 2001 From: Bill Metangmo <25366207+billmetangmo@users.noreply.github.com> Date: Thu, 14 Dec 2023 10:41:44 +0100 Subject: [PATCH] hotfix: context exceeds the 4k-token limit when no session is passed; switch to the 16k-token model --- etl/experiments/ui.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/etl/experiments/ui.py b/etl/experiments/ui.py index 9bf84bf..4e0e0a5 100644 --- a/etl/experiments/ui.py +++ b/etl/experiments/ui.py @@ -38,7 +38,7 @@ vectors = FAISS.from_documents(data, embeddings) vectors.save_local(embedding_pth) -llm = ChatOpenAI(max_tokens=500, temperature=0, model_name="gpt-3.5-turbo") +llm = ChatOpenAI(max_tokens=500, temperature=0, model_name="gpt-3.5-turbo-16k") chain_type_kwargs = {"prompt": CHAT_PROMPT}