ollama_mcp_gradio / demo.launcher
#!/bin/bash
# Start the Ollama server in the background and give it a moment to come up;
# `ollama pull` needs a running server, so the model is pulled afterwards
ollama serve &
sleep 5
# Pull the Ollama model (this step can be moved to the image build if desired)
ollama pull granite3.1-moe
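# Note: the fixed sleep above can race the server start. A sketch of a
# readiness poll instead, assuming the default Ollama API address
# (localhost:11434) and that curl is available in the image:
#
#   until curl -sf http://localhost:11434/api/tags > /dev/null; do
#     sleep 1
#   done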
# Start MCP server in the background
python3 server.py &
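# Optional cleanup sketch (an assumption, not part of the original script):
# kill the backgrounded Ollama and MCP processes when this script exits.
#
#   trap 'kill $(jobs -p) 2>/dev/null' EXIT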
# Start the Gradio client (client.py) on 0.0.0.0:7860; it runs in the
# foreground and keeps the launcher alive
python3 client.py --server_name 0.0.0.0 --server_port 7860