Skip to content

Commit

Permalink
clean up Generalist agent a little. fix to materiality agent no files…
Browse files Browse the repository at this point in the history
… when no files are found. Tweak to report director messaging.
  • Loading branch information
IMladjenovic committed Dec 20, 2024
1 parent 7d9e146 commit 688533e
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 46 deletions.
39 changes: 3 additions & 36 deletions backend/src/agents/generalist_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,39 +17,6 @@
)
class GeneralistAgent(ChatAgent):
    async def invoke(self, utterance) -> str:
        """Answer a general-knowledge question by prompting the LLM directly.

        Loads the "generalist-answer" prompt with the user's utterance,
        sends it to the configured model, and wraps the raw LLM reply in
        the agent response envelope expected by callers.
        """
        # Build the prompt from the shared template, injecting the question.
        prompt = engine.load_prompt("generalist-answer", question=utterance)
        # Second positional argument is an empty user-prompt string by design.
        llm_reply = await self.llm.chat(self.model, prompt, "")
        # NOTE(review): "ignore_validation" is the string "false" here, while
        # other agents use a boolean — confirm which form consumers expect.
        envelope = {"content": llm_reply, "ignore_validation": "false"}
        return json.dumps(envelope, indent=4)
17 changes: 9 additions & 8 deletions backend/src/agents/materiality_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,15 @@ def create_llm_files(filenames: list[str]) -> list[LLMFile]:
class MaterialityAgent(ChatAgent):
    async def invoke(self, utterance: str) -> str:
        """Answer a materiality question using the selected reference files.

        Picks reference documents relevant to the utterance; if any are
        found, asks the LLM to answer with those files attached, otherwise
        falls back to a fixed "cannot find documents" message. Either way
        the answer is wrapped in the standard JSON response envelope.
        """
        selected_files = await self.select_material_files(utterance)
        if not selected_files:
            # No suitable documents — report that instead of calling the LLM.
            answer = f"Materiality Agent cannot find suitable reference documents to answer the question: {utterance}"
        else:
            answer = await self.llm.chat_with_file(
                self.model,
                system_prompt=engine.load_prompt("answer-materiality-question"),
                user_prompt=utterance,
                files=create_llm_files(selected_files),
            )
        # NOTE(review): "ignore_validation" is a boolean here but the string
        # "false" in GeneralistAgent — confirm consumers accept both forms.
        return json.dumps({"content": answer, "ignore_validation": False})

async def list_material_topics_for_company(self, company_name: str) -> dict[str, str]:
Expand Down
4 changes: 2 additions & 2 deletions backend/src/directors/report_director.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@ def create_report_chat_message(filename: str, company_name: str, topics: dict[st
if topics:
topics_with_markdown = [f"{key}\n{value}" for key, value in topics.items()]
report_chat_message += f"""
The following materiality topics were identified for {company_name} which the report focuses on:
The following materiality topics were identified for {company_name}:
{"\n\n".join(topics_with_markdown)}"""
return report_chat_message

0 comments on commit 688533e

Please sign in to comment.