AksharaSachin committed on
Commit
9142765
·
verified ·
1 Parent(s): 70cb086

Create agent.py

Browse files
Files changed (1) hide show
  1. agent.py +90 -0
agent.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.chat_models import ChatOllama
2
+ from langgraph.graph import MessagesState, StateGraph, START, END
3
+ from langchain_core.messages import SystemMessage, HumanMessage
4
+ from langchain_community.tools import DuckDuckGoSearchRun
5
+ from langchain_core.tools import tool
6
+ from langgraph.prebuilt import ToolNode
7
+ from langchain_community.document_loaders import WikipediaLoader
8
+ from langgraph.prebuilt import tools_condition
9
+ from langchain_huggingface import HuggingFaceEndpoint
10
+ import os
11
+ from huggingface_hub import login
12
+ from dotenv import load_dotenv
13
+ load_dotenv()
14
+ os.environ["HUGGINGFACEHUB_API_TOKEN"] = os.getenv("HF_TOKEN")
15
+
16
+ @tool
17
+ def use_search_tool(query: str) -> str:
18
+ """Use the search tool to find information.
19
+
20
+ Args: query (str): The search query.
21
+ Returns: str: The search result.
22
+ """
23
+ search_result = DuckDuckGoSearchRun(verbose=0).run(query)
24
+ return {"search_result": search_result}
25
+
26
+ @tool
27
+ def use_wikipedia_tool(query: str) -> str:
28
+ """Fetch a summary from Wikipedia.
29
+
30
+ Args:
31
+ query (str): The topic to search on Wikipedia.
32
+ Returns:
33
+ str: A summary of the topic from Wikipedia.
34
+ """
35
+ result = WikipediaLoader(query=query, load_max_docs=2).load()
36
+ if result:
37
+ return {"Wikipedia_summary": result}
38
+ else:
39
+ return f"Sorry, I couldn't find any information on '{query}' in Wikipedia."
40
+
41
+ def build_agent():
42
+ # llm = ChatOllama(model="llama3.1")
43
+ llm = HuggingFaceEndpoint(
44
+ endpoint_url="https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-Prover-V2-671B",
45
+ huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN")
46
+ )
47
+ tools = [use_wikipedia_tool, use_search_tool]
48
+
49
+ system_template = (
50
+ "You are a helpful, friendly, and respectful AI assistant. "
51
+ "Always address the user politely and answer their questions in a positive manner.\n"
52
+ "When reasoning, always use the following format:\n"
53
+ "Thought: [your reasoning here]\n"
54
+ "Action: [the action to take, should be one of [{tool_names}]]\n"
55
+ "Action Input: [the input to the action]\n"
56
+ "If you know the answer without using a tool, respond with:\n"
57
+ "Thought: [your reasoning here]\n"
58
+ "Final Answer: [your answer here]\n"
59
+ "Always ensure your responses are polite, accurate, and helpful."
60
+ )
61
+ system_prompt = SystemMessage(content=system_template.format(
62
+ tool_names=", ".join([tool.name for tool in tools])
63
+ ))
64
+
65
+ def call_model(state: MessagesState):
66
+ """Call the LLM with the given state."""
67
+ messages = [system_prompt] + state["messages"]
68
+ response = llm.invoke(messages)
69
+ return {"messages" : response}
70
+
71
+ workflow = StateGraph(MessagesState)
72
+ workflow.add_node("Assistent", call_model)
73
+ workflow.add_node("tools", ToolNode(tools))
74
+ workflow.add_edge(START, "Assistent")
75
+ workflow.add_conditional_edges("Assistent", tools_condition)
76
+ workflow.add_edge("tools", "Assistent")
77
+ workflow.add_edge("Assistent", END)
78
+ return workflow.compile()
79
+
80
+ if __name__ == "__main__":
81
+
82
+
83
+ graph = build_agent()
84
+ input = HumanMessage(content="Hello, how are you?")
85
+ response = graph.invoke(input)
86
+
87
+ print(response)
88
+
89
+
90
+