FS-70: Simplify logs by removing or demoting logs to debug (#9)
CLeopard99 authored Oct 29, 2024
1 parent 5766997 commit 1c3321d
Showing 8 changed files with 12 additions and 16 deletions.
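The pattern across all eight files: verbose logger.info calls are deleted outright or demoted to logger.debug, so they stop appearing under the usual INFO threshold. A minimal standard-library sketch of that effect (names here are illustrative, not from this repo):

import logging

logging.basicConfig(level=logging.INFO)  # typical default threshold
logger = logging.getLogger(__name__)

logger.info("emitted: INFO meets the threshold")
logger.debug("suppressed until the level is lowered to DEBUG")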
1 change: 0 additions & 1 deletion backend/src/api/app.py
@@ -98,7 +98,6 @@ async def suggestions():
     logger.info("Requesting chat suggestions")
     try:
         final_result = await generate_suggestions()
-        logger.info(f"Chat suggestions: {final_result}")
         return JSONResponse(status_code=200, content=final_result)
     except Exception as e:
         logger.exception(e)

2 changes: 1 addition & 1 deletion backend/src/llm/count_calls.py
@@ -22,7 +22,7 @@ def reset(self):
 def count_calls(func):
     def wrapper(self=None, *args, **kwargs):
         counter.increment()
-        logging.info(f"Function {func.__name__} has been called {counter.count} times")
+        logging.debug(f"Function {func.__name__} has been called {counter.count} times")
         return func(self, *args, **kwargs)

     counter.reset()

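For context, a self-contained sketch of the decorator above. The Counter class is an assumption standing in for the module-level counter defined earlier in count_calls.py, and the trailing return wrapper is implied by the surrounding code rather than shown in the hunk:

import logging

class Counter:
    # Hypothetical stand-in for the counter object the real module defines.
    def __init__(self):
        self.count = 0

    def increment(self):
        self.count += 1

    def reset(self):
        self.count = 0

counter = Counter()

def count_calls(func):
    def wrapper(self=None, *args, **kwargs):
        counter.increment()
        # Demoted in this commit: per-call counts are debug-level detail.
        logging.debug(f"Function {func.__name__} has been called {counter.count} times")
        return func(self, *args, **kwargs)

    counter.reset()  # zero the shared counter each time a function is decorated
    return wrapper

# Usage: each call to a decorated function bumps the shared counter.
@count_calls
def greet(name):
    return f"Hello, {name}"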
10 changes: 7 additions & 3 deletions backend/src/llm/openai.py
@@ -28,16 +28,20 @@ async def chat(self, model, system_prompt: str, user_prompt: str, return_json=Fa
                 {"role": "user", "content": user_prompt},
             ],
             temperature=0,
-            response_format={"type": "json_object"} if return_json else NOT_GIVEN,
+            response_format={
+                "type": "json_object"} if return_json else NOT_GIVEN,
         )
-        logger.info("OpenAI response: {0}".format(response))
         content = response.choices[0].message.content
+        logger.info(f"OpenAI response: Finish reason: {
+            response.choices[0].finish_reason}, Content: {content}")
+        logger.debug(f"Token data: {response.usage}")
+
         if isinstance(content, str):
             return content
         elif isinstance(content, list):
             return " ".join(content)
         else:
             return "Unexpected content format"
     except Exception as e:
-        logger.error("Error calling OpenAI model: {0}".format(e))
+        logger.error(f"Error calling OpenAI model: {e}")
         return "An error occurred while processing the request."

5 changes: 1 addition & 4 deletions backend/src/router.py
@@ -33,10 +33,7 @@ async def build_plan(task, llm: LLM, scratchpad, model):
     await publish_log_info(LogPrefix.USER, f"Scratchpad so far: {scratchpad}", __name__)
     best_next_step = await llm.chat(model, response_format_prompt, best_next_step_prompt, return_json=True)

-    plan = to_json(best_next_step, "Failed to interpret LLM next step format from step string")
-    await publish_log_info(LogPrefix.USER, f"Next best step response: {json.dumps(plan, indent=4)}", __name__)
-
-    return plan
+    return to_json(best_next_step, "Failed to interpret LLM next step format from step string")


 def find_agent_from_name(name):

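The router hunk folds the intermediate plan variable and its log into a direct return. A plausible shape for the to_json helper it calls, labelled hypothetical since the real implementation lives elsewhere in the repo:

import json

def to_json(raw: str, error_message: str) -> dict:
    # Hypothetical: parse the LLM's step string, raising a readable error on failure.
    try:
        return json.loads(raw)
    except json.JSONDecodeError as e:
        raise Exception(error_message) from e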
1 change: 0 additions & 1 deletion backend/src/session/redis_session_middleware.py
@@ -66,7 +66,6 @@ def get_redis_session(request: Request):
     if session_data and isinstance(session_data, str):
         parsed_session_data = try_parse_to_json(session_data)
         if parsed_session_data:
-            logger.info(f"Parsed session data: {parsed_session_data}")
             return parsed_session_data
     return {}

5 changes: 1 addition & 4 deletions backend/src/suggestions_generator.py
@@ -5,11 +5,8 @@
 from src.session import Message, get_session_chat
 from src.utils.config import Config

-import logging
-
 config = Config()
 engine = PromptEngine()
-logger = logging.getLogger(__name__)
 suggestions_prompt = engine.load_prompt("generate_message_suggestions")
 model = config.suggestions_model

@@ -38,7 +35,7 @@ def get_suggestions_model() -> str:
 def get_chat_history() -> List[str] | str:
     max_history_length = 4
     raw_history = get_session_chat()
-    logger.info(f"Raw history: {raw_history}")
+
     if raw_history is None:
         return "No chat history available."

2 changes: 1 addition & 1 deletion backend/src/supervisors/supervisor.py
@@ -33,9 +33,9 @@ async def solve_task(task, scratchpad, attempt=0) -> Tuple[str, str, str]:
         raise Exception(unsolvable_response)

     agent = await get_agent_for_task(task, scratchpad)
-    logger.info(f"Agent selected: {agent}")
     if agent is None:
         raise Exception(no_agent_response)
+    logger.info(f"Agent selected: {agent.name}")
     logger.info(f"Task is {task}")
     answer = await agent.invoke(task)
     parsed_json = json.loads(answer)

2 changes: 1 addition & 1 deletion backend/src/utils/scratchpad.py
@@ -25,5 +25,5 @@ def update_scratchpad(agent_name=None, question=None, result=None, error=None):


 def clear_scratchpad():
-    logger.info("Scratchpad cleared")
+    logger.debug("Scratchpad cleared")
     scratchpad.clear()
