Update gaia_agent.py
gaia_agent.py  +5 -5
@@ -1,4 +1,4 @@
-from langchain_ollama import ChatOllama
+# from langchain_ollama import ChatOllama
 from langchain_core.messages import SystemMessage, HumanMessage
 from langgraph.graph import START, StateGraph, MessagesState
 from langgraph.prebuilt import ToolNode, tools_condition
@@ -154,12 +154,12 @@ sys_msg = SystemMessage(content=GAIA_SYSTEM_PROMPT)
 
 
 # Build graph function
-def build_graph(provider: str = "
+def build_graph(provider: str = "huggingface"):
     """Build the graph"""
     # Load environment variables from .env file
-    if provider == "ollama":
-
-
+    # if provider == "ollama":
+    #     chat = ChatOllama(model="llama3.1")
+    if provider == "huggingface":
         llm = HuggingFaceEndpoint(
             repo_id="Qwen/Qwen2.5-Coder-32B-Instruct"
         )
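For context, here is a minimal sketch of how the changed build_graph plausibly wires into the rest of gaia_agent.py, based only on the imports and context lines visible in the diff. The multiply tool, the assistant node, load_dotenv, and the ChatHuggingFace wrapper are illustrative assumptions; the real file defines its own GAIA tools and builds sys_msg from GAIA_SYSTEM_PROMPT.

# Hedged sketch, not the actual file: reconstructs the likely shape of
# build_graph after this commit. The multiply tool, the assistant node, and
# the ChatHuggingFace wrapper are assumptions not shown in the diff.
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.tools import tool
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition


@tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b


tools = [multiply]  # placeholder: the real agent registers its GAIA tools here
sys_msg = SystemMessage(content="You are a GAIA agent.")  # GAIA_SYSTEM_PROMPT in the real file


def build_graph(provider: str = "huggingface"):
    """Build the graph"""
    # Load environment variables from .env file (e.g. HF_TOKEN / HUGGINGFACEHUB_API_TOKEN)
    load_dotenv()

    if provider == "huggingface":
        llm = HuggingFaceEndpoint(
            repo_id="Qwen/Qwen2.5-Coder-32B-Instruct"
        )
        # HuggingFaceEndpoint is a text-completion LLM; wrapping it in
        # ChatHuggingFace gives a chat interface that supports bind_tools.
        chat = ChatHuggingFace(llm=llm)
    else:
        raise ValueError(f"Unsupported provider: {provider}")

    chat_with_tools = chat.bind_tools(tools)

    def assistant(state: MessagesState):
        # Prepend the system prompt, call the model, append its reply to the state
        return {"messages": [chat_with_tools.invoke([sys_msg] + state["messages"])]}

    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))
    builder.add_edge(START, "assistant")
    # Route to the tools node when the model emits tool calls, otherwise end
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")
    return builder.compile()


if __name__ == "__main__":
    graph = build_graph()
    out = graph.invoke({"messages": [HumanMessage(content="What is 6 times 7?")]})
    print(out["messages"][-1].content)

Defaulting provider to "huggingface" and commenting out the Ollama branch keeps the local-model path documented while the deployed agent calls the Hugging Face Inference API, which is presumably why the function still loads credentials from .env.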