#!/bin/bash
# Start the Ollama server first, then pull your model once the API is up;
# ollama pull needs a running server (the pull can be moved to build if desired)
ollama serve &
sleep 5
ollama pull granite3.1-moe
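
# Optional (not part of the original script): a fixed sleep can be flaky on
# slow hosts. A sturdier sketch is to poll Ollama's HTTP API until it answers
# before pulling; 11434 is Ollama's default port and is assumed unchanged here:
#
#   until curl -sf http://localhost:11434/api/tags > /dev/null; do
#     sleep 1
#   done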

# Start MCP server in the background
python3 server.py &

# Start Gradio (client.py) on 0.0.0.0:7860
python3 client.py --server_name 0.0.0.0 --server_port 7860
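
# Optional hardening (an assumption, not part of the original script): making
# the script exit on the first failed command surfaces startup errors
# immediately. That would be a matter of adding, near the top:
#
#   set -euo pipefail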