|
import datetime
import os
import sys

import gradio as gr
from smolagents import InferenceClientModel, LiteLLMModel, ToolCallingAgent, MCPClient

import openf1_tools
|
|
|
|
|
# Timestamp captured once at import time (local timezone, ISO 8601).
# NOTE(review): shadows the stdlib `time` module name — consider renaming.
time = datetime.datetime.now().astimezone().isoformat()

# Prompt template for the agent. The {time} placeholder is a template hole:
# it is intended to be substituted (e.g. via str.format) before the prompt
# is sent to the model — it is NOT filled in automatically here.
SYSTEM_PROMPT = """You are a helpful Formula 1 assistant and strategist. You have access to various F1 data and tools to help answer questions about races, drivers, teams, and more.
Be concise and accurate in your responses. If you don't know something, use the available tools to find the information.
In addition, you will be asked to act as a live race engineer strategist during a Formula 1 race, making crucial calls during the event.

Current time (ISO 8601): {time}"""
|
|
|
|
|
def agent_chat(message: str, history: list) -> str:
    """Answer one chat turn by running the tool-calling agent.

    Builds a single prompt out of the system prompt, the prior chat
    history, and the new user message, then delegates to ``agent.run``.

    Args:
        message: The user's latest message.
        history: Gradio "messages"-style history; each item is a dict
            with ``'role'`` and ``'content'`` keys.

    Returns:
        The agent's final answer.
    """
    # Fill the {time} placeholder explicitly: SYSTEM_PROMPT is a plain
    # template string, so interpolating it into an f-string as-is would
    # leave a literal "{time}" in the prompt.
    system = SYSTEM_PROMPT.format(time=time)
    # Built separately to avoid nesting same-type quotes inside an
    # f-string expression (a SyntaxError before Python 3.12).
    transcript = "\n".join(f"{turn['role']}: {turn['content']}" for turn in history)
    prompt = f"{system}\n{transcript}\nTask: {message}"
    # max_steps caps the reasoning/tool-call loop to keep latency bounded.
    return agent.run(prompt, max_steps=5)
|
|
|
|
|
if __name__ == "__main__":

    # Demo toggles.
    list_tools = True        # print the available MCP tools and exit
    local_model = True       # use a local Ollama model instead of a hosted one
    openf1_tool_only = True  # restrict the agent to the OpenF1 tools

    # Defined before `try` so the finally block cannot hit a NameError
    # when MCPClient(...) itself raises.
    mcp_client = None
    try:
        mcp_client = MCPClient(
            {"url": "https://agents-mcp-hackathon-f1-mcp-server.hf.space/gradio_api/mcp/sse", "transport": "sse"})
        tools = mcp_client.get_tools()

        if openf1_tool_only:
            # Keep only the server tools that mirror the callables defined in
            # the local openf1_tools module (server prefixes their names).
            openf1_fn_names = {
                f"f1_mcp_server_{fn}"
                for fn in dir(openf1_tools)
                if callable(getattr(openf1_tools, fn))
            }
            # The raw urlopen helper is not an F1 tool; discard (unlike
            # remove) does not raise if the name is absent.
            openf1_fn_names.discard("f1_mcp_server_urlopen")
            tools = [t for t in tools if t.name in openf1_fn_names]

        if list_tools:
            print("### MCP tools ### ")
            print("\n".join(f"Tool {1+i}: {t.name}: {t.description}" for i, t in enumerate(tools)))
            # SystemExit still runs the finally block below, which now
            # disconnects exactly once (previously disconnected twice here).
            sys.exit(0)

        if local_model:
            # Local model served by Ollama.
            model = LiteLLMModel(
                model_id="ollama_chat/qwen3:1.7b",
                api_base="http://127.0.0.1:11434",
                num_ctx=32768,
            )
        else:
            # Hosted inference via Nebius; requires NEBIUS_API_KEY in the
            # environment (api_key is None otherwise).
            model = InferenceClientModel(
                model_id="deepseek-ai/DeepSeek-R1",
                provider="nebius",
                api_key=os.getenv("NEBIUS_API_KEY")
            )

        agent = ToolCallingAgent(model=model, tools=[*tools])

        # NOTE(review): the title below looks mojibake-mangled (likely was a
        # racing-car emoji); left byte-identical — confirm intended text.
        chat_interface = gr.ChatInterface(
            fn=agent_chat,
            type="messages",
            examples=[
                "What are the driver standings for the 2024 Formula 1 season?",
                "What is the calendar for the 2024 Formula 1 season?"
            ],
            title="๐๏ธ Formula 1 Assistant",
            description="This is a simple agent that uses MCP tools to answer questions about Formula 1."
        )

        chat_interface.launch()

    finally:
        # Close the MCP connection on any exit path (error, sys.exit, or a
        # normal return), but only if the client was actually created.
        if mcp_client is not None:
            mcp_client.disconnect()