diff --git a/kendra_retriever_samples/app.py b/kendra_retriever_samples/app.py index 34613b9..a440056 100644 --- a/kendra_retriever_samples/app.py +++ b/kendra_retriever_samples/app.py @@ -79,7 +79,7 @@ def read_properties_file(filename): else: raise Exception("Unsupported LLM: ", sys.argv[1]) else: - raise Exception("Usage: streamlit run app.py ") + raise Exception("Usage: streamlit run app.py ") if 'chat_history' not in st.session_state: st.session_state['chat_history'] = [] diff --git a/kendra_retriever_samples/kendra_chat_bedrock_claudev2.py b/kendra_retriever_samples/kendra_chat_bedrock_claudev2.py index b1b98c3..74198bc 100644 --- a/kendra_retriever_samples/kendra_chat_bedrock_claudev2.py +++ b/kendra_retriever_samples/kendra_chat_bedrock_claudev2.py @@ -23,17 +23,22 @@ class bcolors: def build_chain(): region = os.environ["AWS_REGION"] kendra_index_id = os.environ["KENDRA_INDEX_ID"] - credentials_profile_name = os.environ['AWS_PROFILE'] - print(credentials_profile_name) - - - llm = Bedrock( + if "AWS_PROFILE" in os.environ: + credentials_profile_name = os.environ['AWS_PROFILE'] + print("Using " + credentials_profile_name + " profile.") + llm = Bedrock( credentials_profile_name=credentials_profile_name, region_name = region, model_kwargs={"max_tokens_to_sample":300,"temperature":1,"top_k":250,"top_p":0.999,"anthropic_version":"bedrock-2023-05-31"}, model_id="anthropic.claude-v2" - ) + ) + else: + llm = Bedrock( + region_name = region, + model_kwargs={"max_tokens_to_sample":300,"temperature":1,"top_k":250,"top_p":0.999,"anthropic_version":"bedrock-2023-05-31"}, + model_id="anthropic.claude-v2" + ) retriever = AmazonKendraRetriever(index_id=kendra_index_id,top_k=5,region_name=region) diff --git a/kendra_retriever_samples/requirements.txt b/kendra_retriever_samples/requirements.txt index fbf3f7a..9cc12f1 100644 --- a/kendra_retriever_samples/requirements.txt +++ b/kendra_retriever_samples/requirements.txt @@ -1,4 +1,4 @@ -langchain==0.0.326 +langchain==0.0.329 
boto3>=1.28.27
openai
anthropic