Upload 17 files
- src/__init__.py +0 -0
- src/__pycache__/__init__.cpython-310.pyc +0 -0
- src/__pycache__/config.cpython-310.pyc +0 -0
- src/agent.py +27 -0
- src/config.py +17 -0
- src/temp_test.py +2 -0
- src/tools/__init__.py +0 -0
- src/tools/__pycache__/__init__.cpython-310.pyc +0 -0
- src/tools/__pycache__/file_loader.cpython-310.pyc +0 -0
- src/tools/__pycache__/python_tool.cpython-310.pyc +0 -0
- src/tools/__pycache__/serper_search.cpython-310.pyc +0 -0
- src/tools/__pycache__/web_search.cpython-310.pyc +0 -0
- src/tools/file_loader.py +36 -0
- src/tools/news_search.py +11 -0
- src/tools/python_tool.py +5 -0
- src/tools/serper_search.py +15 -0
- src/tools/web_search.py +7 -0
src/__init__.py
ADDED
File without changes

src/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (133 Bytes)

src/__pycache__/config.cpython-310.pyc
ADDED
Binary file (731 Bytes)
src/agent.py
ADDED
@@ -0,0 +1,27 @@
+# src/agent.py
+from langchain.agents import create_react_agent, AgentExecutor
+from src.config import groq_llm
+from src.tools.python_tool import python_tool
+from langchain.hub import pull
+from src.tools.serper_search import serper_search_tool
+from src.tools.file_loader import file_loader_tool
+
+
+llm = groq_llm(model="llama3-70b-8192")
+tools = [python_tool, serper_search_tool, file_loader_tool]
+prompt = pull("hwchase17/react")
+
+agent = create_react_agent(llm=llm, tools=tools, prompt=prompt)
+
+agent_executor = AgentExecutor(
+    agent=agent,
+    tools=tools,
+    verbose=True,
+    handle_parsing_errors=True,  # retry if the model slips
+)
+
+if __name__ == "__main__":
+    print(agent_executor.invoke({
+        "input": "Load this tiny sample CSV and show me the first rows: "
+                 "https://people.sc.fsu.edu/~jburkardt/data/csv/hw_200.csv"
+    }))
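Note on the __main__ block above: AgentExecutor.invoke returns a dict containing the original "input" plus the agent's final "output", so callers that want only the answer can index into the result. A minimal sketch (the query is illustrative, not part of this commit):

    # sketch: pull just the final answer out of the executor's result dict
    result = agent_executor.invoke({"input": "What is 2 ** 10?"})
    print(result["output"])  # the answer alone, without the echoed input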
src/config.py
ADDED
@@ -0,0 +1,17 @@
+"""
+Central place to load env vars and expose shared singletons.
+"""
+
+from functools import lru_cache
+from pathlib import Path
+
+from dotenv import load_dotenv
+from langchain_groq import ChatGroq
+
+# Load .env once when the module is imported
+load_dotenv(dotenv_path=Path(__file__).resolve().parents[1] / ".env")
+
+@lru_cache
+def groq_llm(model: str = "llama3-70b-8192", temperature: float = 0.0):
+    """Return a memoized Groq chat model instance."""
+    return ChatGroq(model_name=model, temperature=temperature)
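config.py resolves the .env one directory above src/, i.e. at the repository root. For the rest of this commit to run, that file needs at least the Groq key; serper_search.py below reads SERPER_API_KEY the same way. A sketch of the expected .env (values are placeholders, never commit real keys):

    GROQ_API_KEY=your_groq_key_here
    SERPER_API_KEY=your_serper_key_here

ChatGroq picks GROQ_API_KEY up from the environment automatically once load_dotenv has run.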
src/temp_test.py
ADDED
@@ -0,0 +1,2 @@
+from src.config import groq_llm
+print(groq_llm().invoke("Ping!"))
src/tools/__init__.py
ADDED
File without changes

src/tools/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (139 Bytes)

src/tools/__pycache__/file_loader.cpython-310.pyc
ADDED
Binary file (1.32 kB)

src/tools/__pycache__/python_tool.cpython-310.pyc
ADDED
Binary file (311 Bytes)

src/tools/__pycache__/serper_search.cpython-310.pyc
ADDED
Binary file (515 Bytes)

src/tools/__pycache__/web_search.cpython-310.pyc
ADDED
Binary file (366 Bytes)
src/tools/file_loader.py
ADDED
@@ -0,0 +1,36 @@
+import io, os, mimetypes, requests, pandas as pd
+from langchain.tools import StructuredTool
+
+def _load_dataset(url_or_path: str, head_only: bool = True) -> str:
+    """Fetch a CSV/JSON file (local or remote) and return either
+    the first ~5 rows or basic stats."""
+    # Grab bytes
+    if url_or_path.startswith(("http://", "https://")):
+        resp = requests.get(url_or_path, timeout=10)
+        resp.raise_for_status()
+        raw = io.BytesIO(resp.content)
+    else:  # local
+        raw = open(os.path.expanduser(url_or_path), "rb")
+
+    # Detect type
+    mime = mimetypes.guess_type(url_or_path)[0] or ""
+    if "json" in mime or url_or_path.lower().endswith(".json"):
+        df = pd.read_json(raw)
+    elif "csv" in mime or url_or_path.lower().endswith(".csv"):
+        df = pd.read_csv(raw)
+    else:
+        raise ValueError("Only CSV or JSON supported.")
+
+    if head_only:
+        return df.head().to_markdown(index=False)
+    return df.describe(include="all").to_markdown()
+
+file_loader_tool = StructuredTool.from_function(
+    name="load_dataset",
+    description=(
+        "Load a CSV or JSON file from a URL or local path. "
+        "Returns a markdown preview of the data."
+    ),
+    func=_load_dataset,
+    return_direct=True,  # skip extra narration
+)
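Because _load_dataset takes two parameters, the resulting StructuredTool is invoked with a dict rather than a bare string. A minimal direct test, reusing the sample CSV URL from agent.py (a sketch, not part of the commit):

    from src.tools.file_loader import file_loader_tool

    # head_only=False switches from the head() preview to describe() stats
    print(file_loader_tool.invoke({
        "url_or_path": "https://people.sc.fsu.edu/~jburkardt/data/csv/hw_200.csv",
        "head_only": True,
    }))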
src/tools/news_search.py
ADDED
@@ -0,0 +1,11 @@
+# from langchain_newsapi import NewsAPISearchRun
+# import os
+
+# news_search_tool = NewsAPISearchRun(
+#     name="news_api_search",
+#     description=(
+#         "Search recent news articles for up-to-date facts. "
+#         "Returns headline, source and short snippet."
+#     ),
+#     news_api_key=os.getenv("NEWSAPI_KEY"),
+# )
src/tools/python_tool.py
ADDED
@@ -0,0 +1,5 @@
+from langchain_experimental.tools.python.tool import PythonREPLTool
+
+python_tool = PythonREPLTool(
+    description="Executes Python code and returns the result."
+)
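PythonREPLTool takes a string of Python source and returns whatever the snippet prints, which makes a one-line smoke test easy (sketch; the snippet is illustrative):

    from src.tools.python_tool import python_tool

    # the tool returns the snippet's stdout as a string
    print(python_tool.invoke("print(2 ** 10)"))  # expect "1024"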
src/tools/serper_search.py
ADDED
@@ -0,0 +1,15 @@
+from langchain_community.utilities import GoogleSerperAPIWrapper
+from langchain.agents import Tool
+
+import os
+
+search_tool = GoogleSerperAPIWrapper(
+    serper_api_key=os.getenv("SERPER_API_KEY"),  # add this to your .env
+    k=3  # top-3 results
+)
+
+serper_search_tool = Tool(
+    name="Google Search",
+    func=search_tool.run,
+    description="Useful for answering questions by searching the web."
+)
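One caveat: os.getenv("SERPER_API_KEY") runs at import time, so the variable must already be in the environment when this module loads; importing src.config first ensures load_dotenv has run, which is the order agent.py uses. A minimal sketch of a direct call (the query is illustrative):

    import src.config  # side effect: load_dotenv() populates SERPER_API_KEY
    from src.tools.serper_search import serper_search_tool

    print(serper_search_tool.run("latest Groq model announcements"))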
src/tools/web_search.py
ADDED
@@ -0,0 +1,7 @@
+from langchain_community.tools.ddg_search import DuckDuckGoSearchRun
+
+# Simple, no-API-key web search
+web_search_tool = DuckDuckGoSearchRun(
+    name="duckduckgo_search",
+    description="Search the web for recent information and return the top snippets."
+)
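Note that agent.py wires in serper_search_tool but not this keyless DuckDuckGo tool. Swapping it in is a one-line change to the tools list in src/agent.py (sketch):

    from src.tools.web_search import web_search_tool

    tools = [python_tool, web_search_tool, file_loader_tool]  # no SERPER_API_KEY needed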