Luong Huu Thanh committed on
Commit
b4ab0f9
·
1 Parent(s): e07df60

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +16 -22
agent.py CHANGED
@@ -43,14 +43,12 @@ load_dotenv()
43
 
44
 
45
  @tool
46
- def wiki_search(query: str) -> Dict:
47
- """
48
- Seach Wikipedia for a query and return maximum 2 results.
49
 
50
  Args:
51
- query (str): the search query
52
- """
53
- search_docs = WikipediaLoader(query=query, max_results=2).load()
54
  formatted_search_docs = "\n\n---\n\n".join(
55
  [
56
  f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
@@ -61,14 +59,12 @@ def wiki_search(query: str) -> Dict:
61
 
62
 
63
  @tool
64
- def web_search(query: str) -> Dict:
65
- """
66
- Search the web for a query and return maximum 2 results.
67
 
68
  Args:
69
- query (str): the search query
70
- """
71
- search_docs = TavilySearchResults(query=query, max_results=2).load()
72
  formatted_search_docs = "\n\n---\n\n".join(
73
  [
74
  f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
@@ -79,21 +75,19 @@ def web_search(query: str) -> Dict:
79
 
80
 
81
  @tool
82
- def arxiv_search(query: str) -> Dict:
83
- """
84
- Search arXiv for a query and return maximum 2 results.
85
 
86
  Args:
87
- query (str): the search query
88
- """
89
- search_docs = ArxivLoader(query=query, max_results=2).load()
90
  formatted_search_docs = "\n\n---\n\n".join(
91
  [
92
- f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
93
  for doc in search_docs
94
  ]
95
  )
96
- return {"arxiv_results": formatted_search_docs}
97
 
98
 
99
  ### =============== CODE INTERPRETER TOOLS =============== ###
@@ -743,7 +737,7 @@ def build_graph(provider: str = "groq"):
743
  # Load environment variables from .env file
744
  if provider == "groq":
745
  # Groq https://console.groq.com/docs/models
746
- llm = ChatGroq(model="meta-llama/llama-4-scout-17b-16e-instruct", temperature=0)
747
  elif provider == "huggingface":
748
  # TODO: Add huggingface endpoint
749
  llm = ChatHuggingFace(
@@ -798,7 +792,7 @@ def build_graph(provider: str = "groq"):
798
 
799
  # test
800
  if __name__ == "__main__":
801
- question = "What is the capital of Vietnam?"
802
  graph = build_graph(provider="groq")
803
  messages = [HumanMessage(content=question)]
804
  messages = graph.invoke({"messages": messages})
 
43
 
44
 
45
  @tool
46
+ def wiki_search(query: str) -> str:
47
+ """Search Wikipedia for a query and return maximum 2 results.
 
48
 
49
  Args:
50
+ query: The search query."""
51
+ search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
 
52
  formatted_search_docs = "\n\n---\n\n".join(
53
  [
54
  f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
 
59
 
60
 
61
  @tool
62
+ def web_search(query: str) -> str:
63
+ """Search Tavily for a query and return maximum 3 results.
 
64
 
65
  Args:
66
+ query: The search query."""
67
+ search_docs = TavilySearchResults(max_results=3).invoke(query=query)
 
68
  formatted_search_docs = "\n\n---\n\n".join(
69
  [
70
  f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
 
75
 
76
 
77
  @tool
78
+ def arxiv_search(query: str) -> str:
79
+ """Search Arxiv for a query and return maximum 3 result.
 
80
 
81
  Args:
82
+ query: The search query."""
83
+ search_docs = ArxivLoader(query=query, load_max_docs=3).load()
 
84
  formatted_search_docs = "\n\n---\n\n".join(
85
  [
86
+ f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
87
  for doc in search_docs
88
  ]
89
  )
90
+ return {"arvix_results": formatted_search_docs}
91
 
92
 
93
  ### =============== CODE INTERPRETER TOOLS =============== ###
 
737
  # Load environment variables from .env file
738
  if provider == "groq":
739
  # Groq https://console.groq.com/docs/models
740
+ llm = ChatGroq(model="qwen-qwq-32b", temperature=0)
741
  elif provider == "huggingface":
742
  # TODO: Add huggingface endpoint
743
  llm = ChatHuggingFace(
 
792
 
793
  # test
794
  if __name__ == "__main__":
795
+ question = "How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia."
796
  graph = build_graph(provider="groq")
797
  messages = [HumanMessage(content=question)]
798
  messages = graph.invoke({"messages": messages})