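"""Thin wrappers around web search (DuckDuckGo via LangChain) and LLM calls
(a local Ollama model or, once implemented, a Hugging Face model), selected through `config`."""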

import config
if config.runLocal:
    from ollama import chat as OllamaChat

from langchain_community.tools import DuckDuckGoSearchRun


def callWebSearch(query):
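    """Run a DuckDuckGo web search for `query` and return the raw results text."""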
    return DuckDuckGo(query)

def callLLM(query):
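    """Route `query` to the local Ollama model or the Hugging Face backend, based on config.runLocal."""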
    if config.runLocal:
        return callLocalLLM(query)
    else:
        return callHfLLM(query)

def DuckDuckGo(query):
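    """Invoke LangChain's DuckDuckGoSearchRun tool and return its result string."""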
    search_tool = DuckDuckGoSearchRun()
    results = search_tool.invoke(query)
    return results

def callLocalLLM(query):
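    """Send `query` as a single-turn chat to the Ollama model named in config.localModel."""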
    response = OllamaChat(
        model=config.localModel,
        messages=[{'role': 'user', 'content': query}],
    )
    return response['message']['content']

def callHfLLM(query):
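    """Placeholder: no Hugging Face backend is wired up yet, so return a stub message."""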
    return f"No Hugging Face backend yet for model {config.hfMoldel}"

if __name__ == "__main__":
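    # Quick manual check: run a web search and print the raw results.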
    response = callWebSearch("who is the president of France")
    print(response)