You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Running the LangGraph example with api 1.1 set in the config, I get a type error. This does not happen with api 1.0, but then I get all sorts of annoying deprecation messages.
# Standard-library imports
import os
from typing import Dict, List

# Third-party imports
from dotenv import load_dotenv
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_openai import ChatOpenAI
from mem0 import Memory
from openai import OpenAI

# Mem0 configuration. NOTE: with "version": "v1.1" Memory.search() returns a
# dict of the form {"results": [...]} rather than the bare list that v1.0
# returned — callers must unwrap the "results" key.
config = {
    "llm": {
        "provider": "openai",
        "config": {
            "model": "gpt-4o-mini",
            # "temperature": 0.2,
            # "max_tokens": 1500,
        }
    },
    "version": "v1.1"
}
# Load environment variables from the .env2 file (expects OPENAI_API_KEY).
load_dotenv(".env2")

openai_client = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY"),
    # You can set other parameters here if needed
)

# Initialize the LangChain chat model and the Mem0 memory store.
llm = ChatOpenAI(model="gpt-4o-mini")
mem0 = Memory.from_config(config)

# Prompt layout: fixed system instruction, then the retrieved memory context,
# then the user's message. The human message MUST be the ("human", "{input}")
# tuple form: a HumanMessage(content="{input}") literal is passed through
# verbatim and "{input}" would never be substituted at invoke time.
prompt = ChatPromptTemplate.from_messages([
    SystemMessage(content="""You are a helpful travel agent AI. Use the provided context to personalize your responses and remember user preferences and past interactions. Provide travel recommendations, itinerary suggestions, and answer questions about destinations. If you don't have specific information, you can make general suggestions based on common travel knowledge."""),
    MessagesPlaceholder(variable_name="context"),
    ("human", "{input}"),
])
def retrieve_context(query: str, user_id: str) -> List[Dict]:
    """Retrieve relevant context from Mem0.

    Returns a two-message context list: a system message carrying the
    serialized memories, followed by the user's query itself.
    """
    memories = mem0.search(query, user_id=user_id)
    # With config "version": "v1.1", Memory.search() returns a dict of the
    # form {"results": [...]} instead of the bare list of v1.0 — iterating
    # the dict directly raises the TypeError reported in this issue.
    # Handle both shapes so the example works on either API version.
    if isinstance(memories, dict):
        memories = memories.get("results", [])
    serialized_memories = ' '.join([mem["memory"] for mem in memories])
    context = [
        {
            "role": "system",
            "content": f"Relevant information: {serialized_memories}"
        },
        {
            "role": "user",
            "content": query
        }
    ]
    return context


def generate_response(input: str, context: List[Dict]) -> str:
    """Generate a response using the language model."""
    chain = prompt | llm
    response = chain.invoke({
        "context": context,
        "input": input
    })
    return response.content


def save_interaction(user_id: str, user_input: str, assistant_response: str):
    """Save one user/assistant exchange to Mem0 under *user_id*."""
    interaction = [
        {
            "role": "user",
            "content": user_input
        },
        {
            "role": "assistant",
            "content": assistant_response
        }
    ]
    mem0.add(interaction, user_id=user_id)
# NOTE: duplicate re-definitions of generate_response and save_interaction
# (byte-identical to the definitions above) were removed here; re-defining
# them only rebound the same names to identical functions and invited the
# two copies to drift apart over time.
def chat_turn(user_input: str, user_id: str) -> str:
    """Run one conversational turn: retrieve context, respond, persist."""
    # Retrieve context
    context = retrieve_context(user_input, user_id)
    # Generate response
    response = generate_response(user_input, context)
    # Save interaction
    save_interaction(user_id, user_input, response)
    return response


if __name__ == "__main__":
    print("Welcome to your personal Travel Agent Planner! How can I assist you with your travel plans today?")
    user_id = "john1"
    while True:
        user_input = input("You: ")
        if user_input.lower() in ['quit', 'exit', 'bye']:
            print("Travel Agent: Thank you for using our travel planning service. Have a great trip!")
            break
        response = chat_turn(user_input, user_id)
        print(f"Travel Agent: {response}")
Please replace the line `seralized_memories = ' '.join([mem["memory"] for mem in memories])` with `seralized_memories = ' '.join([mem['memory'] for mem in memories['results']])` — with api version 1.1, `search()` returns a dict whose `results` key holds the list of memories.
🐛 Describe the bug
Running the LangGraph example with api 1.1 set in the config, I get a type error. This does not happen with api 1.0, but then I get all sorts of annoying deprecation messages.
error:
The text was updated successfully, but these errors were encountered: