Commit 2ab5a48
1 Parent(s): 90bf24a

assdf

Files changed:
- __pycache__/agent.cpython-310.pyc +0 -0
- agent.py +5 -4
__pycache__/agent.cpython-310.pyc CHANGED

Binary files a/__pycache__/agent.cpython-310.pyc and b/__pycache__/agent.cpython-310.pyc differ
agent.py CHANGED

@@ -1,5 +1,6 @@
 """LangGraph Agent"""
 import os
+import pandas as pd
 from dotenv import load_dotenv
 from langgraph.graph import START, StateGraph, MessagesState
 from langgraph.prebuilt import tools_condition
@@ -16,7 +17,7 @@ from langchain_core.tools import tool
 from langchain.tools.retriever import create_retriever_tool
 from supabase.client import Client, create_client
 from pydantic import BaseModel, Field
-
+

 from typing import List, Set, Any

@@ -335,12 +336,12 @@ def build_graph(provider: str = "huggingface"):
         # Groq https://console.groq.com/docs/models
         llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # optional : qwen-qwq-32b gemma2-9b-it
     elif provider == "huggingface":
-        repo_id = "togethercomputer/evo-1-131k-base"
-        repo_id="HuggingFaceH4/zephyr-7b-beta",
+        # repo_id = "togethercomputer/evo-1-131k-base"
+        # repo_id="HuggingFaceH4/zephyr-7b-beta",
         if not hf_token:
             raise ValueError("HF_TOKEN environment variable not set. It's required for Hugging Face provider.")
         llm = HuggingFaceEndpoint(
-            repo_id="
+            repo_id="togethercomputer/LLaMA-2-7B-32K",
             provider="auto",
             task="text-generation",
             max_new_tokens=1000,
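For context on the change above, a minimal sketch (not part of the commit) of how the new HuggingFaceEndpoint configuration is typically wired into a LangChain chat model. The repo_id, provider, task, max_new_tokens and the HF_TOKEN check mirror the diff; the import path, the ChatHuggingFace wrapper and the huggingfacehub_api_token argument are assumptions about surrounding code that the diff does not show.

import os
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

hf_token = os.getenv("HF_TOKEN")
if not hf_token:
    # Same check as in build_graph(): the endpoint needs an API token.
    raise ValueError("HF_TOKEN environment variable not set. It's required for Hugging Face provider.")

# Endpoint configured with the model id introduced by this commit.
llm = HuggingFaceEndpoint(
    repo_id="togethercomputer/LLaMA-2-7B-32K",
    provider="auto",  # as in the diff; requires a recent langchain-huggingface release
    task="text-generation",
    max_new_tokens=1000,
    huggingfacehub_api_token=hf_token,  # assumed; the diff does not show how the token is passed
)

# Wrapping the endpoint in ChatHuggingFace (assumed) yields a chat-style model that
# can be bound to tools and used inside the StateGraph that build_graph() assembles.
chat_model = ChatHuggingFace(llm=llm)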