VirtualOasis committed
Commit cfa2282 · verified · 1 Parent(s): 99eb8d9

Update app.py

Files changed (1): app.py (+256 −167)
app.py CHANGED
@@ -11,6 +11,22 @@ import base64
 from huggingface_hub import InferenceClient
 import re
 from urllib.parse import urlparse
+import warnings
+
+# Configure matplotlib for better font handling
+plt.rcParams['font.family'] = ['DejaVu Sans', 'Arial', 'Liberation Sans']
+plt.rcParams['font.size'] = 10
+warnings.filterwarnings('ignore', category=UserWarning)
+
+def clean_text_for_display(text):
+    """Clean text to remove characters that might cause font issues."""
+    if not isinstance(text, str):
+        return str(text)
+
+    # Remove or replace problematic characters
+    text = re.sub(r'[^\x00-\x7F]+', '', text)  # Remove non-ASCII characters
+    text = re.sub(r'\s+', ' ', text).strip()  # Normalize whitespace
+    return text[:50] if len(text) > 50 else text  # Limit length for display

 def fetch_content(url_or_text):
     """Fetch content from URL or return text directly.
@@ -21,38 +37,79 @@ def fetch_content(url_or_text):
     Returns:
         Extracted text content
     """
-    # Check if input looks like a URL
-    parsed = urlparse(url_or_text)
-    if parsed.scheme in ['http', 'https']:
-        try:
-            headers = {
-                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
-            }
-            response = requests.get(url_or_text, headers=headers, timeout=10)
-            response.raise_for_status()
-
-            # Parse HTML and extract text
-            soup = BeautifulSoup(response.content, 'html.parser')
-
-            # Remove script and style elements
-            for script in soup(["script", "style"]):
-                script.decompose()
-
-            # Get text and clean it up
-            text = soup.get_text()
-            lines = (line.strip() for line in text.splitlines())
-            chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
-            text = ' '.join(chunk for chunk in chunks if chunk)
-
-            return text[:5000]  # Limit to first 5000 characters
-        except Exception as e:
-            return f"Error fetching URL: {str(e)}"
-    else:
-        # It's direct text input
-        return url_or_text
+    try:
+        # Check if input looks like a URL
+        parsed = urlparse(url_or_text)
+        if parsed.scheme in ['http', 'https']:
+            try:
+                headers = {
+                    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
+                }
+                response = requests.get(url_or_text, headers=headers, timeout=10)
+                response.raise_for_status()
+
+                # Parse HTML and extract text
+                soup = BeautifulSoup(response.content, 'html.parser')
+
+                # Remove script and style elements
+                for script in soup(["script", "style"]):
+                    script.decompose()
+
+                # Get text and clean it up
+                text = soup.get_text()
+                lines = (line.strip() for line in text.splitlines())
+                chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
+                text = ' '.join(chunk for chunk in chunks if chunk)
+
+                return text[:5000]  # Limit to first 5000 characters
+            except Exception as e:
+                return f"Error fetching URL: {str(e)}"
+        else:
+            # It's direct text input
+            return url_or_text
+    except Exception as e:
+        return f"Error processing input: {str(e)}"
+
+def simple_entity_extraction(text):
+    """Fallback entity extraction when AI is not available."""
+    try:
+        words = text.split()
+        entities = []
+
+        # Simple heuristic: words that are capitalized and longer than 2 characters
+        seen = set()
+        for word in words[:30]:  # Limit to first 30 words
+            clean_word = re.sub(r'[^\w]', '', word)
+            if (clean_word.istitle() and len(clean_word) > 2 and
+                clean_word.lower() not in seen and
+                clean_word not in ['The', 'This', 'That', 'When', 'Where', 'How']):
+                entities.append({
+                    "name": clean_text_for_display(clean_word),
+                    "type": "CONCEPT",
+                    "description": "Auto-detected entity"
+                })
+                seen.add(clean_word.lower())
+
+        # Create some basic relationships
+        relationships = []
+        if len(entities) > 1:
+            for i in range(min(len(entities) - 1, 5)):  # Max 5 relationships
+                relationships.append({
+                    "source": entities[i]["name"],
+                    "target": entities[i + 1]["name"],
+                    "relation": "related_to",
+                    "description": "Sequential relationship"
+                })
+
+        return {"entities": entities[:10], "relationships": relationships}
+    except Exception as e:
+        return {
+            "entities": [{"name": "Error", "type": "ERROR", "description": str(e)}],
+            "relationships": []
+        }

 def extract_entities(text):
-    """Extract entities and relationships using Mistral.
+    """Extract entities and relationships using Mistral AI with fallback.

     Args:
         text: Input text to analyze
@@ -61,9 +118,15 @@ def extract_entities(text):
         Dictionary containing entities and relationships
     """
     try:
+        # Check if HF_TOKEN is available
+        hf_token = os.environ.get("HF_TOKEN")
+        if not hf_token:
+            print("No HF_TOKEN found, using simple extraction")
+            return simple_entity_extraction(text)
+
         client = InferenceClient(
             provider="together",
-            api_key=os.environ.get("HF_TOKEN"),
+            api_key=hf_token,
         )

         prompt = f"""
@@ -71,52 +134,51 @@
     1. Named entities (people, organizations, locations, concepts)
     2. Relationships between these entities

-    Return the result as a JSON object with this structure:
+    Return ONLY a valid JSON object with this structure:
     {{
         "entities": [
-            {{"name": "entity_name", "type": "PERSON|ORG|LOCATION|CONCEPT", "description": "brief description"}}
+            {{"name": "entity_name", "type": "PERSON", "description": "brief description"}}
         ],
         "relationships": [
             {{"source": "entity1", "target": "entity2", "relation": "relationship_type", "description": "brief description"}}
         ]
    }}

-    Text to analyze:
-    {text[:2000]}
-
-    JSON:"""
+    Text to analyze: {text[:1500]}
+    """

         completion = client.chat.completions.create(
             model="mistralai/Mistral-Small-24B-Instruct-2501",
-            messages=[
-                {
-                    "role": "user",
-                    "content": prompt
-                }
-            ],
-            max_tokens=1500,
-            temperature=0.3,
+            messages=[{"role": "user", "content": prompt}],
+            max_tokens=1000,
+            temperature=0.1,
         )

         response_text = completion.choices[0].message.content

-        # Extract JSON from response
+        # Clean and extract JSON
         json_match = re.search(r'\{.*\}', response_text, re.DOTALL)
         if json_match:
             json_str = json_match.group()
-            return json.loads(json_str)
-        else:
-            # Fallback: create simple entities from text
-            words = text.split()
-            entities = []
-            for i, word in enumerate(words[:20]):  # Limit to first 20 words
-                if word.istitle() and len(word) > 2:
-                    entities.append({"name": word, "type": "CONCEPT", "description": "Extracted entity"})
+            # Clean the JSON string
+            json_str = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', json_str)  # Remove control characters
+
+            parsed_data = json.loads(json_str)
+
+            # Clean entity names for display
+            if "entities" in parsed_data:
+                for entity in parsed_data["entities"]:
+                    if "name" in entity:
+                        entity["name"] = clean_text_for_display(entity["name"])

-            return {"entities": entities, "relationships": []}
+            return parsed_data
+        else:
+            print("No valid JSON found in AI response, using fallback")
+            return simple_entity_extraction(text)

     except Exception as e:
-        return {"entities": [{"name": "Error", "type": "ERROR", "description": str(e)}], "relationships": []}
+        print(f"AI extraction failed: {e}, using fallback")
+        return simple_entity_extraction(text)

 def build_knowledge_graph(entities_data):
     """Build and visualize knowledge graph.
@@ -133,70 +195,71 @@ def build_knowledge_graph(entities_data):

         # Add nodes (entities)
         entities = entities_data.get("entities", [])
-        for entity in entities:
-            G.add_node(entity["name"],
-                       type=entity.get("type", "UNKNOWN"),
-                       description=entity.get("description", ""))
+        for entity in entities[:15]:  # Limit to 15 entities for better visualization
+            clean_name = clean_text_for_display(entity.get("name", "Unknown"))
+            if clean_name and len(clean_name.strip()) > 0:
+                G.add_node(clean_name,
+                           type=entity.get("type", "UNKNOWN"),
+                           description=entity.get("description", ""))

         # Add edges (relationships)
         relationships = entities_data.get("relationships", [])
         for rel in relationships:
-            if rel["source"] in G.nodes and rel["target"] in G.nodes:
-                G.add_edge(rel["source"], rel["target"],
+            source = clean_text_for_display(rel.get("source", ""))
+            target = clean_text_for_display(rel.get("target", ""))
+            if source in G.nodes and target in G.nodes:
+                G.add_edge(source, target,
                            relation=rel.get("relation", "related"),
                            description=rel.get("description", ""))

         # If no relationships found, create some connections between entities
-        if len(relationships) == 0 and len(entities) > 1:
-            entity_names = [e["name"] for e in entities[:10]]  # Limit to 10
-            for i in range(len(entity_names) - 1):
-                G.add_edge(entity_names[i], entity_names[i + 1], relation="related")
+        if len(relationships) == 0 and len(list(G.nodes())) > 1:
+            node_list = list(G.nodes())
+            for i in range(min(len(node_list) - 1, 5)):
+                G.add_edge(node_list[i], node_list[i + 1], relation="related")

         # Create visualization
-        fig, ax = plt.subplots(figsize=(12, 8))
-
-        # Position nodes using spring layout
-        pos = nx.spring_layout(G, k=2, iterations=50)
-
-        # Color nodes by type
-        node_colors = []
-        type_colors = {
-            "PERSON": "#FF6B6B",
-            "ORG": "#4ECDC4",
-            "LOCATION": "#45B7D1",
-            "CONCEPT": "#96CEB4",
-            "ERROR": "#FF0000",
-            "UNKNOWN": "#DDA0DD"
-        }
-
-        for node in G.nodes():
-            node_type = G.nodes[node].get('type', 'UNKNOWN')
-            node_colors.append(type_colors.get(node_type, "#DDA0DD"))
-
-        # Draw the graph
-        nx.draw(G, pos,
-                node_color=node_colors,
-                node_size=1000,
-                font_size=8,
-                font_weight='bold',
-                with_labels=True,
-                edge_color='gray',
-                width=2,
-                alpha=0.7,
-                ax=ax)
-
-        # Add title
-        ax.set_title("Knowledge Graph", size=16, weight='bold')
-
-        # Add legend
-        legend_elements = []
-        for type_name, color in type_colors.items():
-            if any(G.nodes[node].get('type') == type_name for node in G.nodes()):
-                legend_elements.append(plt.Line2D([0], [0], marker='o', color='w',
-                                                  markerfacecolor=color, markersize=10, label=type_name))
-
-        if legend_elements:
-            ax.legend(handles=legend_elements, loc='upper right', bbox_to_anchor=(1.15, 1))
+        fig, ax = plt.subplots(figsize=(10, 8))
+
+        # Skip if no nodes
+        if len(G.nodes()) == 0:
+            ax.text(0.5, 0.5, "No entities found to visualize",
+                    ha='center', va='center', fontsize=14, transform=ax.transAxes)
+            ax.set_title("Knowledge Graph")
+            ax.axis('off')
+        else:
+            # Position nodes using spring layout
+            pos = nx.spring_layout(G, k=1, iterations=50)
+
+            # Color nodes by type
+            node_colors = []
+            type_colors = {
+                "PERSON": "#FF6B6B",
+                "ORG": "#4ECDC4",
+                "LOCATION": "#45B7D1",
+                "CONCEPT": "#96CEB4",
+                "ERROR": "#FF0000",
+                "UNKNOWN": "#DDA0DD"
+            }
+
+            for node in G.nodes():
+                node_type = G.nodes[node].get('type', 'UNKNOWN')
+                node_colors.append(type_colors.get(node_type, "#DDA0DD"))
+
+            # Draw the graph
+            nx.draw(G, pos,
+                    node_color=node_colors,
+                    node_size=800,
+                    font_size=8,
+                    font_weight='bold',
+                    with_labels=True,
+                    edge_color='gray',
+                    width=1.5,
+                    alpha=0.8,
+                    ax=ax)
+
+            # Add title
+            ax.set_title("Knowledge Graph", size=14, weight='bold')

         # Convert to PIL Image
         fig.canvas.draw()
@@ -210,14 +273,13 @@ def build_knowledge_graph(entities_data):
         return pil_image

     except Exception as e:
-        # Create error image
+        # Create simple error image
         fig, ax = plt.subplots(figsize=(8, 6))
-        ax.text(0.5, 0.5, f"Error creating graph:\n{str(e)}",
+        ax.text(0.5, 0.5, f"Error creating graph",
                 ha='center', va='center', fontsize=12, transform=ax.transAxes)
         ax.set_title("Knowledge Graph Error")
         ax.axis('off')

-        # Convert to PIL Image
         fig.canvas.draw()
         img_array = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
         img_array = img_array.reshape(fig.canvas.get_width_height()[::-1] + (3,))
@@ -238,11 +300,14 @@ def knowledge_graph_builder(url_or_text):
         Tuple of (entities_json, graph_image, summary)
     """
     try:
+        if not url_or_text or len(url_or_text.strip()) == 0:
+            return "{}", None, "Please provide some text or a URL to analyze."
+
         # Step 1: Fetch content
         content = fetch_content(url_or_text)

         if content.startswith("Error"):
-            return content, None, "Failed to fetch content"
+            return json.dumps({"error": content}), None, content

         # Step 2: Extract entities
         entities_data = extract_entities(content)
@@ -254,65 +319,89 @@ def knowledge_graph_builder(url_or_text):
         num_entities = len(entities_data.get("entities", []))
         num_relationships = len(entities_data.get("relationships", []))

-        summary = f"""
-        Knowledge Graph Analysis Complete!
-
-        📊 **Statistics:**
-        - Entities found: {num_entities}
-        - Relationships found: {num_relationships}
-        - Content length: {len(content)} characters
-
-        🔍 **Extracted Entities:**
-        """
+        summary = f"""## Knowledge Graph Analysis Complete!
+
+📊 **Statistics:**
+- Entities found: {num_entities}
+- Relationships found: {num_relationships}
+- Content length: {len(content)} characters
+
+🔍 **Extracted Entities:**"""

-        for entity in entities_data.get("entities", [])[:10]:  # Show first 10
-            summary += f"\n **{entity['name']}** ({entity.get('type', 'UNKNOWN')}): {entity.get('description', 'No description')}"
+        for entity in entities_data.get("entities", [])[:8]:  # Show first 8
+            name = entity.get('name', 'Unknown')
+            entity_type = entity.get('type', 'UNKNOWN')
+            desc = entity.get('description', 'No description')
+            summary += f"\n• **{name}** ({entity_type}): {desc}"

-        if len(entities_data.get("entities", [])) > 10:
-            summary += f"\n... and {len(entities_data.get('entities', [])) - 10} more entities"
+        if len(entities_data.get("entities", [])) > 8:
+            summary += f"\n\n... and {len(entities_data.get('entities', [])) - 8} more entities"

-        return json.dumps(entities_data, indent=2), graph_image, summary
+        # Ensure valid JSON output
+        try:
+            json_output = json.dumps(entities_data, indent=2, ensure_ascii=True)
+        except Exception as e:
+            json_output = json.dumps({"error": f"JSON serialization failed: {str(e)}"})
+
+        return json_output, graph_image, summary

     except Exception as e:
-        return f"Error: {str(e)}", None, "Analysis failed"
+        error_msg = f"Analysis failed: {str(e)}"
+        return json.dumps({"error": error_msg}), None, error_msg

-# Create Gradio interface
-demo = gr.Interface(
-    fn=knowledge_graph_builder,
-    inputs=[
-        gr.Textbox(
-            label="URL or Text Input",
-            placeholder="Enter a URL (https://example.com) or paste text directly...",
-            lines=3,
-            info="Enter a website URL to analyze, or paste text content directly"
-        )
-    ],
-    outputs=[
-        gr.JSON(label="Extracted Entities & Relationships"),
-        gr.Image(label="Knowledge Graph Visualization"),
-        gr.Markdown(label="Analysis Summary")
-    ],
-    title="🧠 AI Knowledge Graph Builder",
-    description="""
-    **Transform any text or webpage into an interactive knowledge graph!**
-
-    This tool uses AI to:
-    1. 📖 Extract content from URLs or analyze your text
-    2. 🤖 Use Mistral AI to identify entities and relationships
-    3. 🕸️ Build and visualize knowledge graphs
-    4. 📊 Provide detailed analysis summaries
-
-    **Examples to try:**
-    - News articles: `https://www.bbc.com/news`
-    - Wikipedia pages: `https://en.wikipedia.org/wiki/Artificial_intelligence`
-    - Direct text: Copy and paste any article or document
-    """,
-    examples=[
-        ["https://en.wikipedia.org/wiki/Machine_learning"],
-        ["Artificial intelligence is transforming the world. Companies like OpenAI, Google, and Microsoft are leading the development of large language models. These models are being used in applications ranging from chatbots to code generation."],
-        ["https://www.nature.com/articles/d41586-023-00057-9"]
-    ],
-    theme=gr.themes.Soft()
-)
-
-demo.launch(mcp_server=True)
+# Create Gradio interface with error handling
+try:
+    demo = gr.Interface(
+        fn=knowledge_graph_builder,
+        inputs=[
+            gr.Textbox(
+                label="URL or Text Input",
+                placeholder="Enter a URL (https://example.com) or paste text directly...",
+                lines=3,
+                info="Enter a website URL to analyze, or paste text content directly"
+            )
+        ],
+        outputs=[
+            gr.JSON(label="Extracted Entities & Relationships"),
+            gr.Image(label="Knowledge Graph Visualization", type="pil"),
+            gr.Markdown(label="Analysis Summary")
+        ],
+        title="🧠 AI Knowledge Graph Builder",
+        description="""
+        **Transform any text or webpage into an interactive knowledge graph!**
+
+        This tool:
+        1. 📖 Extracts content from URLs or analyzes your text
+        2. 🤖 Uses AI to identify entities and relationships
+        3. 🕸️ Builds and visualizes knowledge graphs
+        4. 📊 Provides detailed analysis summaries
+
+        **Examples to try:**
+        - News articles, Wikipedia pages, or any text content
+        """,
+        examples=[
+            ["Artificial intelligence companies like OpenAI, Google, and Microsoft are developing large language models for various applications."],
+            ["https://en.wikipedia.org/wiki/Machine_learning"],
+        ],
+        theme=gr.themes.Soft(),
+        allow_flagging="never"
+    )

+    if __name__ == "__main__":
+        demo.launch(mcp_server=True, share=False)
+
+except Exception as e:
+    print(f"Failed to create Gradio interface: {e}")
+    # Create a simple fallback interface
+    def simple_demo(text):
+        return f"Error: {e}", None, "Application failed to start properly."

+    demo = gr.Interface(
+        fn=simple_demo,
+        inputs="text",
+        outputs=["text", "image", "text"],
+        title="Error - Knowledge Graph Builder"
+    )
+
+    if __name__ == "__main__":
+        demo.launch(mcp_server=True, share=False)
 
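Notes on the new code follow; the sketches below are illustrations, not part of app.py.

The new `clean_text_for_display` helper strips non-ASCII characters outright rather than transliterating them, so accented names lose letters. A spot-check that duplicates the committed helper so it runs standalone:

    import re

    def clean_text_for_display(text):
        # Copy of the committed helper, for illustration only.
        if not isinstance(text, str):
            return str(text)
        text = re.sub(r'[^\x00-\x7F]+', '', text)  # drops accented characters entirely
        text = re.sub(r'\s+', ' ', text).strip()
        return text[:50] if len(text) > 50 else text

    print(clean_text_for_display("Café Zürich"))  # -> 'Caf Zrich'
    print(clean_text_for_display(12345))          # -> '12345'

That trade-off avoids matplotlib font warnings at the cost of mangling non-English entity names.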
 
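`simple_entity_extraction` gives the app a no-token mode: Title-case words become CONCEPT entities and consecutive entities are chained together. The expected shape of its output, assuming app.py is importable as a module (a hypothetical setup; on the Space it only runs inside the app):

    from app import simple_entity_extraction  # assumes app.py is on sys.path

    result = simple_entity_extraction("Researchers at Google and Microsoft met in Paris")
    # Entities: Researchers, Google, Microsoft, Paris (title-case, longer than
    # 2 chars, not in the small stop-word list). Relationships: up to 5
    # sequential "related_to" links between consecutive entities.
    print(result["entities"])
    print(result["relationships"])

Note that the `istitle()` check skips mixed-case names such as "OpenAI", which is one reason the AI path is preferred whenever a token is present.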
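`extract_entities` now checks for `HF_TOKEN` up front instead of constructing an `InferenceClient` with `api_key=None`. The call pattern in isolation (requires a huggingface_hub release with inference-provider support; the one-line prompt is a stand-in):

    import os
    from huggingface_hub import InferenceClient

    hf_token = os.environ.get("HF_TOKEN")
    if not hf_token:
        print("No HF_TOKEN found, a heuristic fallback would run instead")
    else:
        client = InferenceClient(provider="together", api_key=hf_token)
        completion = client.chat.completions.create(
            model="mistralai/Mistral-Small-24B-Instruct-2501",
            messages=[{"role": "user", "content": "Reply with one word."}],
            max_tokens=16,
            temperature=0.1,
        )
        print(completion.choices[0].message.content)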
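The JSON recovery still uses a greedy `\{.*\}` match, which spans from the first `{` to the last `}` in the reply, so prose after the object containing a stray brace will break `json.loads` and trigger the fallback. A slightly more forgiving order of operations, as a sketch (a hypothetical helper, not part of the commit): try a direct parse first, then the brace slice with the same control-character scrub:

    import json
    import re

    def parse_model_json(response_text):
        try:
            return json.loads(response_text)  # model returned pure JSON
        except json.JSONDecodeError:
            match = re.search(r'\{.*\}', response_text, re.DOTALL)
            if match is None:
                return None  # caller falls back to heuristic extraction
            cleaned = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', match.group())
            return json.loads(cleaned)

    print(parse_model_json('Sure! {"entities": [], "relationships": []}'))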
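The node/edge step now runs relationship endpoints through the same `clean_text_for_display` normalization as node names, so edges can only attach to nodes that were actually added. The core logic on toy data (the hunk does not show how `G` is created; an undirected `nx.Graph()` is assumed here):

    import networkx as nx

    data = {
        "entities": [
            {"name": "Ada Lovelace", "type": "PERSON"},
            {"name": "Analytical Engine", "type": "CONCEPT"},
        ],
        "relationships": [
            {"source": "Ada Lovelace", "target": "Analytical Engine",
             "relation": "wrote_about"},
        ],
    }

    G = nx.Graph()
    for entity in data["entities"][:15]:
        G.add_node(entity["name"], type=entity.get("type", "UNKNOWN"))
    for rel in data["relationships"]:
        if rel["source"] in G.nodes and rel["target"] in G.nodes:
            G.add_edge(rel["source"], rel["target"],
                       relation=rel.get("relation", "related"))

    print(G.number_of_nodes(), G.number_of_edges())  # 2 1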
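Both the success and error paths still convert the figure via `fig.canvas.tostring_rgb()`. That method is deprecated on Agg canvases in newer matplotlib releases (3.8 and later) in favor of `buffer_rgba()`; if the Space does not pin an older matplotlib, a version-tolerant conversion might look like this sketch:

    import matplotlib
    matplotlib.use("Agg")  # headless backend, as on a Space
    import matplotlib.pyplot as plt
    import numpy as np
    from PIL import Image

    fig, ax = plt.subplots(figsize=(4, 3))
    ax.set_title("Knowledge Graph")
    fig.canvas.draw()

    buf = np.asarray(fig.canvas.buffer_rgba())       # RGBA, supported API
    pil_image = Image.fromarray(buf).convert("RGB")  # drop alpha to match old output
    plt.close(fig)
    print(pil_image.size)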
 
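One caveat carried over from the old version: fetch failures are signalled by returning a string that starts with "Error", and `knowledge_graph_builder` branches on that prefix. Pasted text that legitimately begins with the word "Error" is therefore treated as a failed fetch:

    content = "Error handling in Rust is built around the Result type."
    if content.startswith("Error"):
        print("misclassified as a fetch failure")  # this branch runs

Returning an `(ok, value)` tuple, or raising, would remove the ambiguity of sentinel strings.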
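Finally, the fallback interface has a latent bug: `simple_demo` closes over the exception variable `e`, but Python unbinds the `except ... as e` name when the block exits, so calling the fallback later raises NameError. Binding the message to an ordinary variable first avoids that; a sketch:

    import gradio as gr

    try:
        raise RuntimeError("boom")  # stand-in for gr.Interface(...) failing
    except Exception as e:
        error_text = f"Error: {e}"  # capture now; `e` is cleared after this block

        def simple_demo(text):
            return error_text, None, "Application failed to start properly."

        demo = gr.Interface(
            fn=simple_demo,
            inputs="text",
            outputs=["text", "image", "text"],
            title="Error - Knowledge Graph Builder",
        )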