Skip to content

Commit

Permalink
Graph mode response changed to show actual error occurred during execu…
Browse files Browse the repository at this point in the history
…tion
  • Loading branch information
kaustubh-darekar committed Dec 11, 2024
1 parent e29ebb4 commit 32e4fe7
Showing 1 changed file with 12 additions and 3 deletions.
15 changes: 12 additions & 3 deletions backend/src/QA_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -539,6 +539,8 @@ def create_graph_chain(model, graph):
def get_graph_response(graph_chain, question):
try:
cypher_res = graph_chain.invoke({"query": question})
if not cypher_res:
raise Exception("The invoke method returned None")

response = cypher_res.get("result")
cypher_query = ""
Expand All @@ -555,17 +557,24 @@ def get_graph_response(graph_chain, question):
"cypher_query": cypher_query,
"context": context
}

except Exception as e:
logging.error(f"An error occurred while getting the graph response : {e}")
return {
"response": None,
"cypher_query": None,
"context": None,
"error": str(e)
}

def process_graph_response(model, graph, question, messages, history):
try:
graph_chain, qa_llm, model_version = create_graph_chain(model, graph)

graph_response = get_graph_response(graph_chain, question)

ai_response_content = graph_response.get("response", "Something went wrong")
if graph_response.get("error"):
raise Exception(graph_response['error'])
else:
ai_response_content = graph_response.get("response", "Something went wrong")
ai_response = AIMessage(content=ai_response_content)

messages.append(ai_response)
Expand Down

0 comments on commit 32e4fe7

Please sign in to comment.