import datetime
import os

import gradio as gr
from smolagents import InferenceClientModel, LiteLLMModel, ToolCallingAgent, MCPClient

import openf1_tools

# Can manually set this to a specific time to make the agent think it is in the past
time = datetime.datetime.now().astimezone().isoformat()

SYSTEM_PROMPT = """You are a helpful Formula 1 assistant and strategist. You have access to various F1 data and tools to help answer questions about races, drivers, teams, and more. Be concise and accurate in your responses. If you don't know something, use the available tools to find the information.

In addition, you will be asked to act as a live race engineer strategist during a Formula 1 race, making crucial calls during the event.

Current time (ISO 8601): {time}"""


def agent_chat(message: str, history: list) -> str:
    """Answer one chat turn by composing a prompt and running the agent.

    Manually composes: system prompt (with the current time substituted),
    then the prior conversation history, then the current user message.

    Args:
        message: The user's latest message.
        history: Gradio "messages"-style history — a list of dicts with
            "role" and "content" keys.

    Returns:
        The agent's final answer string.
    """
    # FIX: substitute the {time} placeholder — previously SYSTEM_PROMPT was
    # interpolated raw, so the model saw the literal text "{time}".
    system = SYSTEM_PROMPT.format(time=time)
    # Built separately to avoid nesting quotes inside an f-string
    # (same-quote nesting is a SyntaxError before Python 3.12).
    history_text = "\n".join(f"{turn['role']}: {turn['content']}" for turn in history)
    prompt = f"{system}\n{history_text}\nTask: {message}"
    return agent.run(prompt, max_steps=5)


if __name__ == "__main__":
    list_tools = True        # Set to True to only list tools (used for debugging)
    local_model = True       # If you have Ollama installed, set this to True
    openf1_tool_only = True

    # Predeclare so the finally clause can't raise NameError if MCPClient
    # construction itself fails.
    mcp_client = None
    try:
        mcp_client = MCPClient(
            {"url": "https://agents-mcp-hackathon-f1-mcp-server.hf.space/gradio_api/mcp/sse",
             "transport": "sse"})
        tools = mcp_client.get_tools()

        if openf1_tool_only:
            # Keep only the MCP tools that mirror the local openf1_tools
            # helpers (server prefixes each name with "f1_mcp_server_").
            openf1_fn_names = [f"f1_mcp_server_{fn}" for fn in dir(openf1_tools)
                               if callable(getattr(openf1_tools, fn))]
            openf1_fn_names.remove("f1_mcp_server_urlopen")
            tools = [t for t in tools if t.name in openf1_fn_names]

        if list_tools:
            print("### MCP tools ### ")
            print("\n".join(f"Tool {1+i}: {t.name}: {t.description}"
                            for i, t in enumerate(tools)))
            # FIX: no explicit disconnect here — the finally clause handles it,
            # and disconnecting twice was the previous behavior via exit(0).
            raise SystemExit(0)

        # Define model
        if local_model:
            model = LiteLLMModel(
                model_id="ollama_chat/qwen3:1.7b",
                api_base="http://127.0.0.1:11434",  # Default ollama server
                num_ctx=32768,
            )
        else:
            model = InferenceClientModel(
                model_id="deepseek-ai/DeepSeek-R1",
                provider="nebius",
                api_key=os.getenv("NEBIUS_API_KEY"),
            )

        agent = ToolCallingAgent(model=model, tools=[*tools])

        chat_interface = gr.ChatInterface(
            fn=agent_chat,
            type="messages",
            examples=[
                "What are the driver standings for the 2024 Formula 1 season?",
                "What is the calendar for the 2024 Formula 1 season?",
            ],
            title="🏎️ Formula 1 Assistant",
            description="This is a simple agent that uses MCP tools to answer questions about Formula 1.",
        )
        chat_interface.launch()
    finally:
        # Guard: mcp_client is None when construction failed before assignment.
        if mcp_client is not None:
            mcp_client.disconnect()