File size: 2,386 Bytes
1bbca12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
from langchain_openai import ChatOpenAI
from langchain_core.tools import tool
from duckduckgo_search import DDGS
import os
from dotenv import load_dotenv

# Load environment variables from a local .env file (if present) into
# os.environ, then fail fast at import time when the OpenAI key is missing —
# better than a confusing auth error deep inside the first LLM call.
load_dotenv()
api_key = os.getenv("OPENAI_API_KEY")
if not api_key:
    raise ValueError("OPENAI_API_KEY environment variable not set")

@tool
async def web_search(query: str) -> str:
    """
    Performs a web search using DuckDuckGo and returns a string of results.

    Args:
        query (str): The search query string.

    Returns:
        str: A newline-joined string of "title: body" entries, a short
            message when no results are found, or an error description
            (errors are returned as text rather than raised, so the tool
            never propagates exceptions to the agent).
    """
    try:
        with DDGS() as ddgs:
            results = await ddgs.atext(keywords=query, max_results=5)
            # The search client can return None or an empty list; joining
            # over None or indexing missing keys would raise TypeError /
            # KeyError inside the tool, so guard and use .get() instead.
            if not results:
                return "No results found."
            return "\n".join(
                f"{r.get('title', '')}: {r.get('body', '')}" for r in results
            )
    except Exception as e:
        return f"Error performing web search: {str(e)}"

# Module-level alias for the web_search tool — presumably the name other
# modules import when wiring up an agent; verify against callers.
search_tool = web_search

class MultiHopSearchTool:
    """Iterative ("multi-hop") web search tool.

    Runs several rounds of web search; after each round an LLM rewrites the
    query based on the results so far, and all rounds' raw results are
    returned concatenated. The name/description/inputs/output_type
    attributes mirror a tool-schema convention for agent frameworks.
    """

    def __init__(self):
        self.name = "multi_hop_search"
        self.description = "Performs iterative web searches to refine results for complex queries."
        # Declarative input schema for the agent framework consuming this tool.
        self.inputs = {
            "query": {"type": "string", "description": "Initial search query"},
            "steps": {"type": "integer", "description": "Number of search iterations (default: 3)"}
        }
        self.output_type = str
        self.llm = ChatOpenAI(
            model="gpt-4o",
            api_key=api_key,
            temperature=0,  # deterministic query refinement
            http_client=None  # Explicitly disable custom HTTP client to avoid proxies
        )

    async def aparse(self, query: str, steps: int = 3) -> str:
        """Run `steps` search rounds, refining the query between rounds.

        Args:
            query (str): Initial search query.
            steps (int): Number of search iterations (non-positive values
                yield an empty result string).

        Returns:
            str: All rounds' search results joined by blank lines, or an
                error description (exceptions are captured, not raised).
        """
        try:
            current_query = query
            results: list[str] = []
            for _ in range(steps):
                # BUG FIX: web_search is an async-only @tool, so it must be
                # called via ainvoke(); the sync invoke() raises
                # NotImplementedError for coroutine tools.
                search_result = await web_search.ainvoke({"query": current_query})
                results.append(search_result)

                # Ask the LLM to rewrite the query using what we found so far.
                prompt = f"""Based on the query: {current_query}
                And the search results: {search_result}
                Generate a refined search query to get more precise results."""
                response = await self.llm.ainvoke(prompt)
                current_query = response.content

            return "\n\n".join(results)
        except Exception as e:
            return f"Error in multi-hop search: {str(e)}"
# Instantiated at import time: constructing the tool also builds the
# ChatOpenAI client, so importing this module requires OPENAI_API_KEY
# (enforced by the check above).
multi_hop_search_tool = MultiHopSearchTool()