naman1102 committed on
Commit 3b2a7e8 · 1 Parent(s): 8c7fbf9

Update app.py

Files changed (1)
  1. app.py +13 -2
app.py CHANGED
@@ -11,6 +11,7 @@ from langgraph.graph import Graph, StateGraph
 from typing_extensions import TypedDict
 from openai import OpenAI
 from tools import simple_search
+import re
 
 # -------------------------
 # Utility helpers
@@ -105,15 +106,25 @@ class BasicAgent:
         state["current_step"] = "answer"
         return state
 
+    def _extract_boxed_answer(self, text: str) -> str:
+        """Extract answer from boxed format or return original text if no box found."""
+        # Look for text between [box] and [/box] tags
+        box_match = re.search(r'\[box\](.*?)\[/box\]', text, re.DOTALL)
+        if box_match:
+            return box_match.group(1).strip()
+        return text.strip()
+
     def _generate_answer(self, state: AgentState) -> AgentState:
         history_text = "\n".join(str(item) for item in state["history"])
         prompt = (
             f"Answer the user question as directly as possible. If sources were retrieved, incorporate them.\n"
             f"Question: {state['question']}\n\nContext:\n{history_text}\n\n"
-            "Give ONLY the final answer without extra formatting or explanation."
+            "Give ONLY the final answer without extra formatting or explanation.\n"
+            "Put your answer in a box using [box] and [/box] tags.\n"
+            "If you cannot find a definitive answer, say 'I cannot find a definitive answer to this question.'"
         )
         answer = self._call_llm(prompt, max_tokens=150)
-        state["final_answer"] = answer
+        state["final_answer"] = self._extract_boxed_answer(answer)
        state["history"].append({"step": "answer", "output": answer})
        state["logs"]["final_answer"] = {"prompt": prompt, "response": answer}
        state["current_step"] = "done"