burtenshaw committed
Commit c902a3f · 1 Parent(s): fdab3e3

use base path to point to gradio

Files changed (4)
  1. README.md +1 -0
  2. _README.md +0 -0
  3. app.py +3 -3
  4. server.py +0 -192
README.md CHANGED
@@ -7,6 +7,7 @@ sdk: gradio
 sdk_version: 5.31.0
 app_file: app.py
 pinned: false
+base_path: /gradio
 ---
 
 # 🤖 Hugging Face Discussion Bot
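Note: the new `base_path: /gradio` front-matter entry appears to point the Space at the path where the FastAPI server mounts the Gradio dashboard (the same `/gradio` path seen in app.py's hunk header below). A minimal sketch of that arrangement, with a placeholder dashboard rather than the real one:

```python
# Minimal sketch (assumption): FastAPI app with the Gradio UI mounted under
# /gradio, matching the base_path now declared in the Space README.
from fastapi import FastAPI
import gradio as gr

app = FastAPI()

with gr.Blocks() as demo:  # placeholder UI, not the repo's actual dashboard
    gr.Markdown("Dashboard served under /gradio")

# Same call and path as in app.py above.
app = gr.mount_gradio_app(app, demo, path="/gradio")
```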
_README.md DELETED
File without changes
app.py CHANGED
@@ -187,6 +187,6 @@ app = gr.mount_gradio_app(app, gradio_app, path="/gradio")
 
 if __name__ == "__main__":
     print("🚀 Starting HF Discussion Bot with Tiny Agents...")
-    print("📊 Dashboard: http://localhost:8001/gradio")
-    print("🔗 Webhook: http://localhost:8001/webhook")
-    uvicorn.run("server:app", host="0.0.0.0", port=8001, reload=True)
+    print("📊 Dashboard: http://localhost:7860/gradio")
+    print("🔗 Webhook: http://localhost:7860/webhook")
+    uvicorn.run("server:app", host="0.0.0.0", port=7860, reload=True)
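Port 7860 is the default port a Hugging Face Space expects the app to listen on, which is presumably why the hard-coded 8001 was swapped out. Assuming the `/webhook` route from the deleted server.py below still exists in app.py, a local smoke test could look like this (secret and payload values are placeholders):

```python
# Hypothetical local check, not part of this commit: send a fake
# discussion-comment webhook to the server listening on port 7860.
import requests

payload = {
    "event": {"action": "create", "scope": "discussion.comment"},
    "comment": {"content": "How do I use this model?", "author": "test-user"},
    "discussion": {"title": "Test Discussion", "num": 1},
    "repo": {"name": "some-org/some-model"},  # placeholder repo name
}

resp = requests.post(
    "http://localhost:7860/webhook",
    json=payload,
    headers={"X-Webhook-Secret": "your-webhook-secret"},  # must match WEBHOOK_SECRET
)
print(resp.json())  # expect {"status": "processing"} for a new comment event
```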
server.py DELETED
@@ -1,192 +0,0 @@
-import os
-from datetime import datetime
-from typing import List, Dict, Any, Optional
-
-from fastapi import FastAPI, Request, BackgroundTasks
-from fastapi.middleware.cors import CORSMiddleware
-import gradio as gr
-import uvicorn
-from pydantic import BaseModel
-from huggingface_hub.inference._mcp.agent import Agent
-from dotenv import load_dotenv
-
-load_dotenv()
-
-# Configuration
-WEBHOOK_SECRET = os.getenv("WEBHOOK_SECRET", "your-webhook-secret")
-HF_TOKEN = os.getenv("HF_TOKEN")
-HF_MODEL = os.getenv("HF_MODEL", "microsoft/DialoGPT-medium")
-HF_PROVIDER = os.getenv("HF_PROVIDER", "huggingface")
-
-# Simple storage for processed comments
-comments_store: List[Dict[str, Any]] = []
-
-# Agent instance
-agent_instance: Optional[Agent] = None
-
-
-class WebhookEvent(BaseModel):
-    event: Dict[str, str]
-    comment: Dict[str, Any]
-    discussion: Dict[str, Any]
-    repo: Dict[str, str]
-
-
-app = FastAPI(title="HF Discussion Bot")
-app.add_middleware(CORSMiddleware, allow_origins=["*"])
-
-
-async def get_agent():
-    """Get or create Agent instance"""
-    global agent_instance
-    if agent_instance is None and HF_TOKEN:
-        agent_instance = Agent(
-            model=HF_MODEL,
-            provider=HF_PROVIDER,
-            api_key=HF_TOKEN,
-            servers=[
-                {
-                    "type": "stdio",
-                    "config": {"command": "python", "args": ["mcp_server.py"]},
-                }
-            ],
-        )
-        await agent_instance.load_tools()
-    return agent_instance
-
-
-async def process_webhook_comment(webhook_data: Dict[str, Any]):
-    """Process webhook using Agent with MCP tools"""
-    comment_content = webhook_data["comment"]["content"]
-    discussion_title = webhook_data["discussion"]["title"]
-    repo_name = webhook_data["repo"]["name"]
-    discussion_num = webhook_data["discussion"]["num"]
-
-    agent = await get_agent()
-    if not agent:
-        ai_response = "Error: Agent not configured (missing HF_TOKEN)"
-    else:
-        # Use Agent to respond to the discussion
-        prompt = f"""
-Please respond to this HuggingFace discussion comment using the available tools.
-
-Repository: {repo_name}
-Discussion: {discussion_title} (#{discussion_num})
-Comment: {comment_content}
-
-First use generate_discussion_response to create a helpful response, then use post_discussion_comment to post it.
-"""
-
-        try:
-            response_parts = []
-            async for item in agent.run(prompt):
-                # Collect the agent's response
-                if hasattr(item, "content") and item.content:
-                    response_parts.append(item.content)
-                elif isinstance(item, str):
-                    response_parts.append(item)
-
-            ai_response = (
-                " ".join(response_parts) if response_parts else "No response generated"
-            )
-        except Exception as e:
-            ai_response = f"Error using agent: {str(e)}"
-
-    # Store the interaction with reply link
-    discussion_url = f"https://huggingface.co/{repo_name}/discussions/{discussion_num}"
-
-    interaction = {
-        "timestamp": datetime.now().isoformat(),
-        "repo": repo_name,
-        "discussion_title": discussion_title,
-        "discussion_num": discussion_num,
-        "discussion_url": discussion_url,
-        "original_comment": comment_content,
-        "ai_response": ai_response,
-        "comment_author": webhook_data["comment"]["author"],
-    }
-
-    comments_store.append(interaction)
-    return ai_response
-
-
-@app.post("/webhook")
-async def webhook_handler(request: Request, background_tasks: BackgroundTasks):
-    """Handle HF Hub webhooks"""
-    webhook_secret = request.headers.get("X-Webhook-Secret")
-    if webhook_secret != WEBHOOK_SECRET:
-        return {"error": "Invalid webhook secret"}
-
-    payload = await request.json()
-    event = payload.get("event", {})
-
-    if event.get("action") == "create" and event.get("scope") == "discussion.comment":
-        background_tasks.add_task(process_webhook_comment, payload)
-        return {"status": "processing"}
-
-    return {"status": "ignored"}
-
-
-async def simulate_webhook(
-    repo_name: str, discussion_title: str, comment_content: str
-) -> str:
-    """Simulate webhook for testing"""
-    if not all([repo_name, discussion_title, comment_content]):
-        return "Please fill in all fields."
-
-    mock_payload = {
-        "event": {"action": "create", "scope": "discussion.comment"},
-        "comment": {
-            "content": comment_content,
-            "author": "test-user",
-            "created_at": datetime.now().isoformat(),
-        },
-        "discussion": {
-            "title": discussion_title,
-            "num": len(comments_store) + 1,
-        },
-        "repo": {"name": repo_name},
-    }
-
-    response = await process_webhook_comment(mock_payload)
-    return f"✅ Processed! AI Response: {response}"
-
-
-def create_gradio_app():
-    """Create Gradio interface"""
-    with gr.Blocks(title="HF Discussion Bot", theme=gr.themes.Soft()) as demo:
-        gr.Markdown("# 🤖 HF Discussion Bot Dashboard")
-        gr.Markdown("*Powered by HuggingFace Tiny Agents + FastMCP*")
-
-        with gr.Column():
-            sim_repo = gr.Textbox(label="Repository", value="microsoft/DialoGPT-medium")
-            sim_title = gr.Textbox(label="Discussion Title", value="Test Discussion")
-            sim_comment = gr.Textbox(
-                label="Comment",
-                lines=3,
-                value="How do I use this model?",
-            )
-            sim_btn = gr.Button("📤 Test Webhook")
-
-        with gr.Column():
-            sim_result = gr.Textbox(label="Result", lines=8)
-
-        sim_btn.click(
-            fn=simulate_webhook,
-            inputs=[sim_repo, sim_title, sim_comment],
-            outputs=[sim_result],
-        )
-
-    return demo
-
-
-# Mount Gradio app
-gradio_app = create_gradio_app()
-app = gr.mount_gradio_app(app, gradio_app, path="/gradio")
-
-
-if __name__ == "__main__":
-    print("🚀 Starting HF Discussion Bot with Tiny Agents...")
-    print("📊 Dashboard: http://localhost:8001/gradio")
-    print("🔗 Webhook: http://localhost:8001/webhook")
-    uvicorn.run("server:app", host="0.0.0.0", port=8001, reload=True)
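For reference, the deleted `/webhook` handler rejected requests carrying the wrong `X-Webhook-Secret` and ignored events that were not newly created discussion comments. A small check of that behaviour against the old server.py (valid only before this commit; afterwards the handler presumably lives in app.py) might have looked like:

```python
# Hypothetical test of the removed server.py handler (pre-commit only).
from fastapi.testclient import TestClient

from server import WEBHOOK_SECRET, app  # this import fails after this commit

client = TestClient(app)

# Wrong secret -> rejected outright.
r = client.post("/webhook", json={}, headers={"X-Webhook-Secret": "wrong"})
assert r.json() == {"error": "Invalid webhook secret"}

# Correct secret but not a new discussion comment -> ignored.
r = client.post(
    "/webhook",
    json={"event": {"action": "update", "scope": "discussion"}},
    headers={"X-Webhook-Secret": WEBHOOK_SECRET},
)
assert r.json() == {"status": "ignored"}
```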