# Hugging Face file-viewer export: jproman, commit f013b22, 792 bytes
# (viewer chrome — "raw", "history blame" — folded into this comment so the module parses)
import config
if config.runLocal:
from ollama import chat as OllamaChat
from langchain_community.tools import DuckDuckGoSearchRun
def callWebSearch(query):
    """Perform a web search for *query*.

    Delegates to the module's DuckDuckGo helper and returns its
    result string unchanged.
    """
    results = DuckDuckGo(query)
    return results
def callLLM(query):
    """Route *query* to the configured LLM backend.

    When ``config.runLocal`` is truthy the local Ollama path is used;
    otherwise the Hugging Face path (currently a placeholder) is used.
    Returns whatever the chosen backend returns.
    """
    backend = callLocalLLM if config.runLocal else callHfLLM
    return backend(query)
def DuckDuckGo(query):
    """Run a DuckDuckGo search for *query* and return the raw results.

    Uses langchain_community's DuckDuckGoSearchRun tool; the return value
    is whatever ``invoke`` produces (a text summary of the hits).
    """
    # A fresh tool instance per call — the tool itself is stateless here.
    return DuckDuckGoSearchRun().invoke(query)
def callLocalLLM(query):
    """Send *query* to the local Ollama model named by ``config.localModel``.

    Returns the assistant's reply text extracted from the chat response.
    """
    messages = [{'role': 'user', 'content': query}]
    reply = OllamaChat(model=config.localModel, messages=messages)
    return reply['message']['content']
def callHfLLM(query):
    """Placeholder for the Hugging Face inference path (not implemented).

    Ignores *query* and returns a fixed message naming the configured model.
    """
    # NOTE(review): 'hfMoldel' looks like a typo for 'hfModel' — confirm the
    # attribute name against config.py before renaming either side.
    placeholder = f"No way to call {config.hfMoldel} yet"
    return placeholder
if __name__ == "__main__":
response = callWebSearch("who is the president of France")
print(response)