feat: Supporting invalid query response!
amindadgar committed Dec 17, 2024
1 parent ff7bc97 commit d859da4
Showing 3 changed files with 10 additions and 2 deletions.
7 changes: 6 additions & 1 deletion subquery.py
@@ -5,6 +5,7 @@
from llama_index.core.tools import QueryEngineTool, ToolMetadata
from llama_index.llms.openai import OpenAI
from llama_index.question_gen.guidance import GuidanceQuestionGenerator
from tc_hivemind_backend.db.utils.preprocess_text import BasePreprocessor
from tc_hivemind_backend.embeddings.cohere import CohereEmbedding
from utils.qdrant_utils import QDrantUtils
from utils.query_engine import (
@@ -19,7 +20,7 @@
    WebsiteQueryEngine,
    prepare_discord_engine_auto_filter,
)

from utils.globals import INVALID_QUERY_RESPONSE

def query_multiple_source(
query: str,
@@ -198,6 +199,10 @@ def query_multiple_source(
                metadata=tool_metadata,
            )
        )
    if not BasePreprocessor().extract_main_content(text=query):
        response = INVALID_QUERY_RESPONSE
        source_nodes = []
        return response, source_nodes

    embed_model = CohereEmbedding()
    llm = OpenAI("gpt-4o-mini")
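For orientation, here is a minimal, self-contained sketch (not the repository's actual code) of the early-return guard the hunk above adds to query_multiple_source: assuming BasePreprocessor().extract_main_content returns an empty string when a query has no meaningful content, the function can return the canned INVALID_QUERY_RESPONSE and an empty node list before any embedding model or LLM is constructed. The extract_main_content stub below is a hypothetical stand-in for the tc_hivemind_backend helper.

```python
# Hypothetical, runnable sketch of the invalid-query guard; the stub below
# stands in for tc_hivemind_backend's BasePreprocessor.extract_main_content.

INVALID_QUERY_RESPONSE = (
    "We're unable to process your query. Please refine it and try again."
)


def extract_main_content(text: str) -> str:
    # Stand-in: strip whitespace so an all-whitespace query becomes "" (falsy).
    return text.strip()


def query_multiple_source(query: str) -> tuple[str, list]:
    # Bail out before building embeddings, LLMs, or query engines
    # when the query carries no usable content.
    if not extract_main_content(text=query):
        response = INVALID_QUERY_RESPONSE
        source_nodes: list = []
        return response, source_nodes

    # ... normal path: CohereEmbedding, OpenAI("gpt-4o-mini"),
    # sub-question engine setup, etc. (elided here).
    return "engine answer", ["<source nodes>"]


if __name__ == "__main__":
    print(query_multiple_source("   "))            # -> canned invalid-query response
    print(query_multiple_source("What changed?"))  # -> normal path
```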
2 changes: 2 additions & 0 deletions utils/globals.py
@@ -1,2 +1,4 @@
# the threshold for skipping nodes from being included in an answer
RETRIEVER_THRESHOLD = 0.4
INVALID_QUERY_RESPONSE="We're unable to process your query. Please refine it and try again."
QUERY_ERROR_MESSAGE="Sorry, we're unable to process your question at the moment. Please try again later."
3 changes: 2 additions & 1 deletion worker/tasks.py
@@ -5,6 +5,7 @@
from llama_index.core.schema import NodeWithScore
from subquery import query_multiple_source
from utils.data_source_selector import DataSourceSelector
from utils.globals import QUERY_ERROR_MESSAGE
from utils.query_engine.prepare_answer_sources import PrepareAnswerSources
from utils.traceloop import init_tracing
from worker.celery import app
@@ -21,7 +22,7 @@ def ask_question_auto_search(
        )
        answer_sources = PrepareAnswerSources().prepare_answer_sources(nodes=references)
    except Exception:
        response = "Sorry, We cannot process your question at the moment."
        response = QUERY_ERROR_MESSAGE
        answer_sources = None
        logging.error(
            f"Errors raised while processing the question for community: {community_id}!"
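As a usage note, a minimal sketch of the fallback pattern this hunk touches, under the assumption that the surrounding try/except in ask_question_auto_search looks roughly like the context shown above: the inline apology string is replaced by the shared QUERY_ERROR_MESSAGE constant, so every failure path returns the same user-facing message. The run_query helper below is hypothetical and stands in for query_multiple_source plus answer-source preparation.

```python
# Hypothetical sketch of the error-fallback pattern in worker/tasks.py:
# failures are logged and the shared constant is returned to the user.
import logging

QUERY_ERROR_MESSAGE = (
    "Sorry, we're unable to process your question at the moment. "
    "Please try again later."
)


def run_query(question: str) -> tuple[str, str]:
    # Stand-in for query_multiple_source + PrepareAnswerSources.
    if not question:
        raise ValueError("empty question")
    return f"answer to: {question}", "formatted answer sources"


def answer_with_fallback(question: str, community_id: str) -> tuple[str, str | None]:
    try:
        response, answer_sources = run_query(question)
    except Exception:
        response = QUERY_ERROR_MESSAGE
        answer_sources = None
        logging.error(
            f"Errors raised while processing the question for community: {community_id}!"
        )
    return response, answer_sources


if __name__ == "__main__":
    print(answer_with_fallback("", community_id="123"))  # -> fallback message, None
```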
