mgbam committed on
Commit
7382906
·
verified ·
1 Parent(s): 6f1c529

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -18
app.py CHANGED
@@ -1,22 +1,36 @@
# NOTE(review): "removed" side of the diff — the superseded synchronous version
# of app.py. Per the replacement code's own comments, add_mcp_server is a
# coroutine and process_single_turn_with_tools is an async generator, so the
# bare call and the plain `for` loop here never actually executed them; the
# commit's "+" side wraps everything in an async main() to fix that.


1
 from huggingface_hub import MCPClient
2

3
- # Use Ollama via its OpenAI-compatible endpoint
4
- client = MCPClient(
5
- provider="openai",
6
- base_url="http://localhost:11434/v1",
7
- model="devstral", # or your custom Ollama model name
8
- api_key="ollama" # any non-empty string is accepted by Ollama
9
- )

10

11
- # Attach Playwright MCP (local stdio server)
12
- client.add_mcp_server(
13
- type="stdio",
14
- command="npx",
15
- args=["@playwright/mcp@latest"]
16
- )
17

18
- # Run a single tool-using turn
19
- for chunk in client.process_single_turn_with_tools(
20
- messages=[{"role": "user", "content": "Open the browser and list the titles on https://news.ycombinator.com"}]
21
- ):
22
- print(chunk)










# app.py
"""Drive a Playwright MCP server through a local Ollama model.

Connects huggingface_hub's MCPClient to Ollama's OpenAI-compatible
endpoint, attaches the Playwright MCP server over stdio, streams one
tool-using turn, and prints each streamed chunk to stdout.
"""
import asyncio

from huggingface_hub import MCPClient


async def main() -> None:
    """Run a single tool-using turn and clean up the client afterwards."""
    # Ollama's OpenAI-compatible endpoint
    client = MCPClient(
        provider="openai",
        base_url="http://localhost:11434/v1",
        model="devstral",  # or your custom Ollama model name
        api_key="ollama",  # any non-empty string works for Ollama
    )

    try:
        # add_mcp_server is a coroutine — it must be awaited.
        await client.add_mcp_server(
            type="stdio",
            command="npx",
            args=["@playwright/mcp@latest"],
        )

        user_msg = {
            "role": "user",
            "content": "Open the browser and list the titles on https://news.ycombinator.com",
        }

        # process_single_turn_with_tools is an async generator; chunks are
        # streamed tool/assistant updates — print or handle them as they arrive.
        async for chunk in client.process_single_turn_with_tools(messages=[user_msg]):
            print(chunk)
    finally:
        # Release network/process resources even if the turn fails, when the
        # client exposes an async close hook.
        if hasattr(client, "aclose"):
            await client.aclose()


if __name__ == "__main__":
    asyncio.run(main())