Saiyaswanth007 committed
Commit a905808 · 1 parent: 17cb251

Experiment 3

Files changed (1): ui.py (+621, -552)
ui.py CHANGED
@@ -1,15 +1,46 @@
1
  import gradio as gr
2
  from fastapi import FastAPI
 
 
3
  from shared import DEFAULT_CHANGE_THRESHOLD, DEFAULT_MAX_SPEAKERS, ABSOLUTE_MAX_SPEAKERS, FINAL_TRANSCRIPTION_MODEL, REALTIME_TRANSCRIPTION_MODEL
4
- import os
 
5
 
6
  # Connection configuration (separate signaling server from model server)
7
- # These will be replaced with environment variables or defaults
8
- RENDER_SIGNALING_URL = os.environ.get("RENDER_SIGNALING_URL", "wss://render-signal-audio.onrender.com/stream")
9
- HF_SPACE_URL = os.environ.get("HF_SPACE_URL", "https://androidguy-speaker-diarization.hf.space")
10
 
11
  def build_ui():
12
- """Build Gradio UI for speaker diarization with improved reliability"""
13
  with gr.Blocks(title="Real-time Speaker Diarization", theme=gr.themes.Soft()) as demo:
14
  # Add configuration variables to page using custom component
15
  gr.HTML(
@@ -26,7 +57,7 @@ def build_ui():
26
 
27
  # Header and description
28
  gr.Markdown("# 🎤 Live Speaker Diarization")
29
- gr.Markdown("Real-time speech recognition with automatic speaker identification")
30
 
31
  # Add transcription model info
32
  gr.Markdown(f"**Using Models:** Final: {FINAL_TRANSCRIPTION_MODEL}, Realtime: {REALTIME_TRANSCRIPTION_MODEL}")
@@ -37,8 +68,7 @@ def build_ui():
37
  <span id="status-text" style="color:#888;">Waiting to connect...</span>
38
  <span id="status-icon" style="width:10px; height:10px; display:inline-block;
39
  background-color:#888; border-radius:50%; margin-left:5px;"></span>
40
- </div>""",
41
- elem_id="connection-status"
42
  )
43
 
44
  with gr.Row():
@@ -52,607 +82,625 @@ def build_ui():
52
  </div>
53
 
54
  <script>
55
- // Global variables
56
- let rtcConnection;
57
- let mediaStream;
58
- let wsConnection;
59
- let statusUpdateInterval;
60
- let isOfflineMode = false;
 
 
 
 
 
61
 
62
- // Check connection to HF space with timeout
63
- async function checkHfConnection() {
64
- try {
65
- const controller = new AbortController();
66
- const timeoutId = setTimeout(() => controller.abort(), 5000);
67
-
68
- const response = await fetch(`${window.HF_SPACE_URL}/health`, {
69
- signal: controller.signal
70
- });
71
- clearTimeout(timeoutId);
72
- return response.ok;
73
- } catch (err) {
74
- console.warn("HF Space connection failed:", err);
75
- return false;
76
  }
77
- }
78
 
79
- // Start the connection and audio streaming with robust error handling
80
- async function startStreaming() {
81
- try {
82
- // Update status
83
- updateStatus('connecting');
84
-
85
- // First check backend connectivity
86
- const backendAvailable = await checkHfConnection();
87
- isOfflineMode = !backendAvailable;
88
-
89
- // Request microphone access - this works even offline
90
- try {
91
- mediaStream = await navigator.mediaDevices.getUserMedia({audio: {
92
- echoCancellation: true,
93
- noiseSuppression: true,
94
- autoGainControl: true
95
- }});
96
- } catch (micErr) {
97
- console.error('Microphone access error:', micErr);
98
- updateStatus('error', 'Microphone access denied: ' + micErr.message);
99
  return;
100
  }
101
 
102
- if (backendAvailable) {
103
- // Try WebRTC connection
104
- try {
105
- await setupWebRTC();
106
- } catch (rtcErr) {
107
- console.error("WebRTC setup failed:", rtcErr);
108
- // Continue even if WebRTC fails
109
- }
110
 
111
- // Try WebSocket connection
112
- try {
113
- setupWebSocket();
114
- } catch (wsErr) {
115
- console.error("WebSocket setup failed:", wsErr);
116
- // Continue even if WebSocket fails
117
  }
118
 
119
- updateStatus('connected');
120
- document.getElementById("conversation").innerHTML = "<i>Connected! Start speaking...</i>";
121
- } else {
122
- updateStatus('warning', 'Running in offline mode - limited functionality');
123
- document.getElementById("conversation").innerHTML =
124
- "<i>Backend connection failed. Microphone active but transcription unavailable.</i>";
 
 
 
 
 
125
  }
126
-
127
- // Start status update interval regardless
128
- statusUpdateInterval = setInterval(updateConnectionInfo, 5000);
129
-
130
- } catch (err) {
131
- console.error('Error starting stream:', err);
132
- updateStatus('error', err.message);
133
- }
134
- }
135
-
136
- // Set up WebRTC connection to Render signaling server
137
- async function setupWebRTC() {
138
- try {
139
- if (rtcConnection) {
140
- rtcConnection.close();
141
  }
142
-
143
- // Use FastRTC's connection approach
144
- const pc = new RTCPeerConnection({
145
- iceServers: [
146
- { urls: 'stun:stun.l.google.com:19302' },
147
- { urls: 'stun:stun1.l.google.com:19302' }
148
- ]
149
- });
150
-
151
- // Add audio track
152
- mediaStream.getAudioTracks().forEach(track => {
153
- pc.addTrack(track, mediaStream);
154
- });
155
-
156
- // Connect to FastRTC signaling via WebSocket with timeout
157
- const signalWs = new WebSocket(window.RENDER_SIGNALING_URL);
158
-
159
- // Set connection timeout
160
- const connectionTimeout = setTimeout(() => {
161
- if (signalWs.readyState !== WebSocket.OPEN) {
162
- signalWs.close();
163
- throw new Error("WebRTC signaling connection timeout");
164
  }
165
- }, 10000);
166
-
167
- // Wait for connection to open
168
- await new Promise((resolve, reject) => {
169
- signalWs.onopen = () => {
170
- clearTimeout(connectionTimeout);
171
- resolve();
172
- };
173
- signalWs.onerror = (err) => {
174
- clearTimeout(connectionTimeout);
175
- reject(new Error("WebRTC signaling connection failed"));
176
- };
177
- });
178
-
179
- // Handle signaling messages
180
- signalWs.onmessage = async (event) => {
181
- try {
182
- const message = JSON.parse(event.data);
 
 
 
 
183
 
184
- if (message.type === 'offer') {
185
- await pc.setRemoteDescription(new RTCSessionDescription(message));
186
- const answer = await pc.createAnswer();
187
- await pc.setLocalDescription(answer);
188
- signalWs.send(JSON.stringify(pc.localDescription));
189
- } else if (message.type === 'candidate') {
190
- if (message.candidate) {
191
  await pc.addIceCandidate(new RTCIceCandidate(message));
192
  }
 
 
193
  }
194
- } catch (err) {
195
- console.error("Error processing signaling message:", err);
196
- }
197
- };
198
-
199
- // Send ICE candidates
200
- pc.onicecandidate = (event) => {
201
- if (event.candidate) {
202
- signalWs.send(JSON.stringify({
203
- type: 'candidate',
204
- candidate: event.candidate
205
- }));
206
- }
207
- };
208
-
209
- // Keep connection reference
210
- rtcConnection = pc;
211
-
212
- // Wait for connection to be established with timeout
213
- await new Promise((resolve, reject) => {
214
- const timeout = setTimeout(() => reject(new Error("WebRTC connection timeout")), 15000);
215
 
 
216
  pc.onconnectionstatechange = () => {
217
- console.log("WebRTC connection state:", pc.connectionState);
218
- if (pc.connectionState === 'connected') {
219
- clearTimeout(timeout);
220
- resolve();
221
- } else if (pc.connectionState === 'failed' || pc.connectionState === 'disconnected' || pc.connectionState === 'closed') {
222
- clearTimeout(timeout);
223
- reject(new Error("WebRTC connection failed"));
224
  }
225
  };
226
 
227
- // Also check ice connection state as fallback
228
- pc.oniceconnectionstatechange = () => {
229
- console.log("ICE connection state:", pc.iceConnectionState);
230
- if (pc.iceConnectionState === 'connected' || pc.iceConnectionState === 'completed') {
231
- clearTimeout(timeout);
232
- resolve();
233
- } else if (pc.iceConnectionState === 'failed' || pc.iceConnectionState === 'disconnected' || pc.iceConnectionState === 'closed') {
234
- clearTimeout(timeout);
235
- reject(new Error("ICE connection failed"));
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
236
  }
237
  };
238
- });
239
 
240
- updateStatus('connected');
241
- console.log("WebRTC connection established successfully");
 
 
 
242
 
243
- } catch (err) {
244
- console.error('WebRTC setup error:', err);
245
- updateStatus('warning', 'WebRTC setup issue: ' + err.message);
246
- throw err;
247
- }
248
- }
249
-
250
- // Set up WebSocket connection to HF Space for conversation updates
251
- function setupWebSocket() {
252
- try {
253
- // Close existing connection if any
254
- if (wsConnection) {
255
- wsConnection.close();
256
  }
257
 
258
- const wsUrl = window.RENDER_SIGNALING_URL.replace('stream', 'ws_relay');
259
- wsConnection = new WebSocket(wsUrl);
 
 
 
260
 
261
- // Set connection timeout
262
- const connectionTimeout = setTimeout(() => {
263
- if (wsConnection.readyState !== WebSocket.OPEN) {
264
- wsConnection.close();
265
- throw new Error("WebSocket connection timeout");
 
 
 
 
 
 
 
 
 
 
 
266
  }
267
- }, 10000);
268
-
269
- wsConnection.onopen = () => {
270
- clearTimeout(connectionTimeout);
271
- console.log('WebSocket connection established');
272
- };
273
-
274
- wsConnection.onmessage = (event) => {
275
- try {
276
- // Parse the JSON message
277
- const message = JSON.parse(event.data);
 
 
 
 
 
278
 
279
- // Process different message types
280
- switch(message.type) {
281
- case 'transcription':
282
- // Handle transcription data
283
- if (message && message.data && typeof message.data === 'object') {
284
- document.getElementById("conversation").innerHTML = message.data.conversation_html ||
285
- JSON.stringify(message.data);
286
- }
287
- break;
288
-
289
- case 'processing_result':
290
- // Handle individual audio chunk processing result
291
- console.log('Processing result:', message.data);
292
-
293
- // Update status info if needed
294
- if (message.data && message.data.status === "processed") {
295
- const statusElem = document.getElementById('status-text');
296
- if (statusElem) {
297
- const speakerId = message.data.speaker_id !== undefined ?
298
- `Speaker ${message.data.speaker_id + 1}` : '';
299
-
300
- if (speakerId) {
301
- statusElem.textContent = `Connected - ${speakerId} active`;
302
- }
303
- }
304
- } else if (message.data && message.data.status === "error") {
305
- updateStatus('error', message.data.message || 'Processing error');
306
- }
307
- break;
308
-
309
- case 'connection':
310
- console.log('Connection status:', message.status);
311
- updateStatus(message.status === 'connected' ? 'connected' : 'warning');
312
- break;
313
-
314
- case 'connection_established':
315
- console.log('Connection established:', message);
316
- updateStatus('connected');
317
-
318
- // If initial conversation is provided, display it
319
- if (message.conversation) {
320
- document.getElementById("conversation").innerHTML = message.conversation;
321
- }
322
- break;
323
-
324
- case 'conversation_update':
325
- if (message.conversation_html) {
326
- document.getElementById("conversation").innerHTML = message.conversation_html;
327
- }
328
- break;
329
-
330
- case 'conversation_cleared':
331
- document.getElementById("conversation").innerHTML =
332
- "<i>Conversation cleared. Start speaking again...</i>";
333
- break;
334
-
335
- case 'error':
336
- console.error('Error message from server:', message.message);
337
- updateStatus('warning', message.message);
338
- break;
339
-
340
- default:
341
- // If it's just HTML content without proper JSON structure (legacy format)
342
- document.getElementById("conversation").innerHTML = event.data;
343
  }
 
344
 
345
- // Auto-scroll to bottom
346
- const container = document.getElementById("conversation");
347
- container.scrollTop = container.scrollHeight;
348
- } catch (e) {
349
- // Fallback for non-JSON messages (legacy format)
350
- document.getElementById("conversation").innerHTML = event.data;
351
 
352
- // Auto-scroll to bottom
353
- const container = document.getElementById("conversation");
354
- container.scrollTop = container.scrollHeight;
355
- }
356
- };
357
-
358
- wsConnection.onerror = (error) => {
359
- clearTimeout(connectionTimeout);
360
- console.error('WebSocket error:', error);
361
- updateStatus('warning', 'WebSocket error');
362
- };
 
 
 
 
 
363
 
364
- wsConnection.onclose = () => {
365
- console.log('WebSocket connection closed');
366
- // Try to reconnect after a delay if not in offline mode
367
- if (!isOfflineMode) {
368
- setTimeout(() => {
369
- try {
370
- setupWebSocket();
371
- } catch (e) {
372
- console.error("Failed to reconnect WebSocket:", e);
373
- }
374
- }, 3000);
375
  }
376
- };
377
- } catch (err) {
378
- console.error("WebSocket setup error:", err);
379
- throw err;
380
  }
381
- }
382
 
383
- // Update connection info in the UI with better error handling
384
- async function updateConnectionInfo() {
385
- try {
386
- const hfConnected = await checkHfConnection();
387
-
388
- if (!hfConnected) {
389
- // If we were online but now offline, update mode
390
- if (!isOfflineMode) {
391
- isOfflineMode = true;
392
- updateStatus('warning', 'Backend unavailable - limited functionality');
393
- }
394
- } else {
395
- // If we were offline but now online, update mode
396
- if (isOfflineMode) {
397
- isOfflineMode = false;
398
 
399
- // Try to reconnect services
400
- try {
401
- if (!rtcConnection || rtcConnection.connectionState !== 'connected') {
402
- await setupWebRTC();
403
- }
404
-
405
- if (!wsConnection || wsConnection.readyState !== WebSocket.OPEN) {
406
- setupWebSocket();
407
  }
408
-
409
- updateStatus('connected');
410
- } catch (e) {
411
- console.warn("Failed to reconnect services:", e);
412
- updateStatus('warning', 'Connection partially restored');
413
  }
414
- } else if (rtcConnection?.connectionState === 'connected' ||
415
- rtcConnection?.iceConnectionState === 'connected') {
416
- updateStatus('connected');
417
  } else {
418
- updateStatus('warning', 'Connection unstable');
419
-
420
- // Try to reconnect if needed
421
- if (!rtcConnection ||
422
- rtcConnection.connectionState === 'failed' ||
423
- rtcConnection.connectionState === 'disconnected') {
424
- try {
425
- await setupWebRTC();
426
- } catch (e) {
427
- console.warn("Failed to reconnect WebRTC:", e);
428
- }
429
- }
430
-
431
- if (!wsConnection || wsConnection.readyState !== WebSocket.OPEN) {
432
- try {
433
- setupWebSocket();
434
- } catch (e) {
435
- console.warn("Failed to reconnect WebSocket:", e);
436
- }
437
- }
438
  }
 
 
439
  }
440
- } catch (err) {
441
- console.error('Error updating connection info:', err);
442
- // Don't update status here to avoid flickering
443
  }
444
- }
445
 
446
- // Update status indicator
447
- function updateStatus(status, message = '') {
448
- const statusText = document.getElementById('status-text');
449
- const statusIcon = document.getElementById('status-icon');
450
-
451
- if (!statusText || !statusIcon) return;
452
-
453
- switch(status) {
454
- case 'connected':
455
- statusText.textContent = 'Connected';
456
- statusIcon.style.backgroundColor = '#4CAF50';
457
- break;
458
- case 'connecting':
459
- statusText.textContent = 'Connecting...';
460
- statusIcon.style.backgroundColor = '#FFC107';
461
- break;
462
- case 'disconnected':
463
- statusText.textContent = 'Disconnected';
464
- statusIcon.style.backgroundColor = '#9E9E9E';
465
- break;
466
- case 'error':
467
- statusText.textContent = 'Error: ' + message;
468
- statusIcon.style.backgroundColor = '#F44336';
469
- break;
470
- case 'warning':
471
- statusText.textContent = 'Warning: ' + message;
472
- statusIcon.style.backgroundColor = '#FF9800';
473
- break;
474
- default:
475
- statusText.textContent = 'Unknown';
476
- statusIcon.style.backgroundColor = '#9E9E9E';
477
- }
478
- }
479
-
480
- // Stop streaming and clean up
481
- function stopStreaming() {
482
- // Close WebRTC connection
483
- if (rtcConnection) {
484
- rtcConnection.close();
485
- rtcConnection = null;
486
- }
487
-
488
- // Close WebSocket
489
- if (wsConnection) {
490
- wsConnection.close();
491
- wsConnection = null;
492
- }
493
-
494
- // Stop all tracks in media stream
495
- if (mediaStream) {
496
- mediaStream.getTracks().forEach(track => track.stop());
497
- mediaStream = null;
498
- }
499
-
500
- // Clear interval
501
- if (statusUpdateInterval) {
502
- clearInterval(statusUpdateInterval);
503
- statusUpdateInterval = null;
504
  }
505
-
506
- // Update status
507
- updateStatus('disconnected');
508
- }
509
 
510
- // Clear conversation with better error handling and offline mode support
511
- function clearConversation() {
512
- // First update the UI immediately regardless of backend availability
513
- document.getElementById("conversation").innerHTML =
514
- "<i>Conversation cleared. Start speaking again...</i>";
515
-
516
- // Then try to update on the backend if available
517
- if (!isOfflineMode) {
518
- checkHfConnection().then(isConnected => {
 
519
  if (isConnected) {
520
- return fetch(`${window.HF_SPACE_URL}/clear`, {
521
- method: 'POST'
 
 
 
522
  });
523
- } else {
524
- throw new Error("Backend unavailable");
 
 
 
 
525
  }
526
- })
527
- .then(resp => resp.json())
528
- .then(data => {
529
- console.log("Backend conversation cleared successfully");
530
- })
531
- .catch(err => {
532
  console.warn("Backend clear API failed:", err);
533
- // No need to update UI again as we already did it above
534
- });
535
- }
536
- }
537
-
538
- // Update settings with better error handling and offline mode support
539
- function updateSettings() {
540
- const threshold = document.querySelector('input[data-testid="threshold-slider"]')?.value ||
541
- document.getElementById('threshold-slider')?.value;
542
- const maxSpeakers = document.querySelector('input[data-testid="speakers-slider"]')?.value ||
543
- document.getElementById('speakers-slider')?.value;
544
-
545
- if (!threshold || !maxSpeakers) {
546
- console.error("Could not find slider values");
547
- return;
548
- }
549
-
550
- // First update the UI immediately regardless of API success
551
- const statusOutput = document.getElementById('status-output');
552
- if (statusOutput) {
553
- statusOutput.innerHTML = `
554
- <h2>System Status</h2>
555
- <p>Settings updated:</p>
556
- <ul>
557
- <li>Threshold: ${threshold}</li>
558
- <li>Max Speakers: ${maxSpeakers}</li>
559
- </ul>
560
- <p>Transcription Models:</p>
561
- <ul>
562
- <li>Final: ${window.FINAL_TRANSCRIPTION_MODEL || "distil-large-v3"}</li>
563
- <li>Realtime: ${window.REALTIME_TRANSCRIPTION_MODEL || "distil-small.en"}</li>
564
- </ul>
565
- `;
566
- }
567
 
568
- // Then try to update on the backend if available and not in offline mode
569
- if (!isOfflineMode) {
570
- checkHfConnection().then(isConnected => {
571
  if (isConnected) {
572
- return fetch(`${window.HF_SPACE_URL}/settings?threshold=${threshold}&max_speakers=${maxSpeakers}`, {
573
- method: 'POST'
574
- });
575
- } else {
576
- throw new Error("Backend unavailable");
 
 
 
 
 
 
 
 
 
 
577
  }
578
- })
579
- .then(resp => resp.json())
580
- .then(data => {
581
- console.log("Backend settings updated successfully:", data);
582
- })
583
- .catch(err => {
584
  console.warn("Backend settings update failed:", err);
585
- // No need to update UI again as we already did it above
586
- });
587
  }
588
- }
589
-
590
- // Set up event listeners when the DOM is loaded
591
- document.addEventListener('DOMContentLoaded', () => {
592
- updateStatus('disconnected');
593
-
594
- // Function to find and bind buttons with retries
595
- function findAndBindButtons() {
596
- // Try to find buttons by ID first (most reliable)
597
- let startBtn = document.getElementById('btn-start');
598
- let stopBtn = document.getElementById('btn-stop');
599
- let clearBtn = document.getElementById('btn-clear');
600
- let updateBtn = document.getElementById('btn-update');
601
 
602
- // Fallback to aria-label if IDs aren't found
603
- if (!startBtn) startBtn = document.querySelector('button[aria-label="Start Listening"]');
604
- if (!stopBtn) stopBtn = document.querySelector('button[aria-label="Stop"]');
605
- if (!clearBtn) stopBtn = document.querySelector('button[aria-label="Clear"]');
606
- if (!updateBtn) updateBtn = document.querySelector('button[aria-label="Update Settings"]');
 
607
 
608
- // Fallback to text content as last resort
609
- if (!startBtn) startBtn = Array.from(document.querySelectorAll('button')).find(btn => btn.textContent.includes('Start'));
610
- if (!stopBtn) stopBtn = Array.from(document.querySelectorAll('button')).find(btn => btn.textContent.includes('Stop'));
611
- if (!clearBtn) clearBtn = Array.from(document.querySelectorAll('button')).find(btn => btn.textContent.includes('Clear'));
612
- if (!updateBtn) updateBtn = Array.from(document.querySelectorAll('button')).find(btn => btn.textContent.includes('Update'));
613
 
614
- // Check if all buttons are found
615
- const buttonsFound = startBtn && stopBtn && clearBtn && updateBtn;
616
 
617
- if (buttonsFound) {
618
- console.log("All buttons found, binding events");
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
619
 
620
- // Bind event handlers
621
- startBtn.onclick = () => startStreaming();
622
- stopBtn.onclick = () => stopStreaming();
623
- clearBtn.onclick = () => clearConversation();
624
- updateBtn.onclick = () => updateSettings();
625
 
626
- // Add data attributes to make it clear these are bound
627
- startBtn.setAttribute('data-bound', 'true');
628
- stopBtn.setAttribute('data-bound', 'true');
629
- clearBtn.setAttribute('data-bound', 'true');
630
- updateBtn.setAttribute('data-bound', 'true');
631
 
 
632
  return true;
633
- } else {
634
- console.log("Not all buttons found, will retry");
635
- return false;
636
  }
637
- }
638
-
639
- // Try to bind immediately
640
- if (!findAndBindButtons()) {
641
- // If not successful, set up a retry mechanism
642
- let retryCount = 0;
643
- const maxRetries = 20; // More retries, longer interval
644
- const retryInterval = 300; // 300ms between retries
645
 
646
- const retryBinding = setInterval(() => {
647
- if (findAndBindButtons() || ++retryCount >= maxRetries) {
648
- clearInterval(retryBinding);
649
- if (retryCount >= maxRetries) {
650
- console.warn("Failed to find all buttons after maximum retries");
651
- }
652
- }
653
- }, retryInterval);
654
  }
655
- });
656
  </script>
657
  """,
658
  label="Live Conversation"
@@ -720,22 +768,26 @@ def build_ui():
720
  - 🟠 Speaker 8 (Gold)
721
  """)
722
 
723
- # Function to get backend status (for periodic updates)
724
  def get_status():
725
  """API call to get system status - called periodically"""
726
- import requests
727
  try:
728
- # Use a short timeout to prevent UI hanging
729
- resp = requests.get(f"{HF_SPACE_URL}/status", timeout=2)
730
  if resp.status_code == 200:
731
- return resp.json().get('formatted_text', 'No status information')
732
- return "Error getting status"
 
 
 
 
 
733
  except Exception as e:
734
- return f"Status update unavailable: Backend may be offline"
735
 
736
- # Set up periodic status updates with shorter interval and error handling
737
- status_timer = gr.Timer(10) # 10 seconds between updates
738
  status_timer.tick(fn=get_status, outputs=status_output)
 
739
 
740
  return demo
741
 
@@ -746,6 +798,23 @@ def mount_ui(app: FastAPI):
746
  """Mount Gradio app to FastAPI"""
747
  app.mount("/ui", demo.app)
748
 
 
 
 
 
749
  # For standalone testing
750
  if __name__ == "__main__":
751
- demo.launch()

1
  import gradio as gr
2
  from fastapi import FastAPI
3
+ import requests
4
+ import json
5
  from shared import DEFAULT_CHANGE_THRESHOLD, DEFAULT_MAX_SPEAKERS, ABSOLUTE_MAX_SPEAKERS, FINAL_TRANSCRIPTION_MODEL, REALTIME_TRANSCRIPTION_MODEL
6
+
7
+ print(gr.__version__)
8
 
9
  # Connection configuration (separate signaling server from model server)
10
+ # These will be replaced at deployment time with the correct URLs
11
+ RENDER_SIGNALING_URL = "wss://render-signal-audio.onrender.com/stream"
12
+ HF_SPACE_URL = "https://androidguy-speaker-diarization.hf.space"
13
+
14
+ class ResourceManager:
15
+ """Manages cleanup of resources"""
16
+ def __init__(self):
17
+ self.timers = []
18
+ self.cleanup_callbacks = []
19
+
20
+ def add_timer(self, timer):
21
+ self.timers.append(timer)
22
+
23
+ def add_cleanup_callback(self, callback):
24
+ self.cleanup_callbacks.append(callback)
25
+
26
+ def cleanup(self):
27
+ for timer in self.timers:
28
+ try:
29
+ timer.stop()
30
+ except:
31
+ pass
32
+
33
+ for callback in self.cleanup_callbacks:
34
+ try:
35
+ callback()
36
+ except:
37
+ pass
38
+
39
+ # Global resource manager
40
+ resource_manager = ResourceManager()
41
 
42
  def build_ui():
43
+ """Build Gradio UI for speaker diarization"""
44
  with gr.Blocks(title="Real-time Speaker Diarization", theme=gr.themes.Soft()) as demo:
45
  # Add configuration variables to page using custom component
46
  gr.HTML(
 
57
 
58
  # Header and description
59
  gr.Markdown("# 🎤 Live Speaker Diarization")
60
+ gr.Markdown(f"Real-time speech recognition with automatic speaker identification")
61
 
62
  # Add transcription model info
63
  gr.Markdown(f"**Using Models:** Final: {FINAL_TRANSCRIPTION_MODEL}, Realtime: {REALTIME_TRANSCRIPTION_MODEL}")
 
68
  <span id="status-text" style="color:#888;">Waiting to connect...</span>
69
  <span id="status-icon" style="width:10px; height:10px; display:inline-block;
70
  background-color:#888; border-radius:50%; margin-left:5px;"></span>
71
+ </div>"""
 
72
  )
73
 
74
  with gr.Row():
 
82
  </div>
83
 
84
  <script>
85
+ // Global state management
86
+ const AppState = {
87
+ rtcConnection: null,
88
+ mediaStream: null,
89
+ wsConnection: null,
90
+ statusUpdateInterval: null,
91
+ wsReconnectAttempts: 0,
92
+ maxReconnectAttempts: 5,
93
+ isConnecting: false,
94
+ isCleaningUp: false
95
+ };
96
 
97
+ // Utility functions
98
+ const Utils = {
99
+ // Check connection to HF space with timeout
100
+ async checkHfConnection(timeout = 5000) {
101
+ try {
102
+ const controller = new AbortController();
103
+ const timeoutId = setTimeout(() => controller.abort(), timeout);
104
+
105
+ const response = await fetch(`${window.HF_SPACE_URL}/health`, {
106
+ signal: controller.signal,
107
+ method: 'GET',
108
+ cache: 'no-cache'
109
+ });
110
+
111
+ clearTimeout(timeoutId);
112
+ return response.ok;
113
+ } catch (err) {
114
+ console.warn("HF Space connection failed:", err);
115
+ return false;
116
+ }
117
+ },
118
+
119
+ // Safe JSON parse
120
+ safeJsonParse(str) {
121
+ try {
122
+ return JSON.parse(str);
123
+ } catch (e) {
124
+ return null;
125
+ }
126
+ },
127
+
128
+ // Debounce function
129
+ debounce(func, wait) {
130
+ let timeout;
131
+ return function executedFunction(...args) {
132
+ const later = () => {
133
+ clearTimeout(timeout);
134
+ func(...args);
135
+ };
136
+ clearTimeout(timeout);
137
+ timeout = setTimeout(later, wait);
138
+ };
139
  }
140
+ };
141
 
142
+ // Main streaming control
143
+ const StreamController = {
144
+ async start() {
145
+ if (AppState.isConnecting || AppState.isCleaningUp) {
146
+ console.log("Already connecting or cleaning up, ignoring start request");
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
147
  return;
148
  }
149
 
150
+ AppState.isConnecting = true;
151
+
152
+ try {
153
+ // Update status
154
+ StatusManager.update('connecting');
155
+
156
+ // Request microphone access with proper error handling
157
+ await this.setupMediaStream();
158
 
159
+ // Check backend availability
160
+ const backendAvailable = await Utils.checkHfConnection();
161
+
162
+ // Setup connections
163
+ if (backendAvailable) {
164
+ await Promise.allSettled([
165
+ this.setupWebRTC(),
166
+ this.setupWebSocket()
167
+ ]);
168
+
169
+ StatusManager.update('connected');
170
+ document.getElementById("conversation").innerHTML = "<i>Connected! Start speaking...</i>";
171
+ } else {
172
+ StatusManager.update('warning', 'Backend unavailable - limited functionality');
173
+ document.getElementById("conversation").innerHTML =
174
+ "<i>Backend connection failed. Microphone active but transcription unavailable.</i>";
175
  }
176
 
177
+ // Start status monitoring
178
+ AppState.statusUpdateInterval = setInterval(() => {
179
+ ConnectionMonitor.updateConnectionInfo();
180
+ }, 5000);
181
+
182
+ } catch (err) {
183
+ console.error('Error starting stream:', err);
184
+ StatusManager.update('error', err.message);
185
+ this.cleanup();
186
+ } finally {
187
+ AppState.isConnecting = false;
188
  }
189
+ },
190
+
191
+ async setupMediaStream() {
192
+ try {
193
+ AppState.mediaStream = await navigator.mediaDevices.getUserMedia({
194
+ audio: {
195
+ echoCancellation: true,
196
+ noiseSuppression: true,
197
+ autoGainControl: true,
198
+ sampleRate: 16000 // Specify sample rate for consistency
199
+ }
200
+ });
201
+ } catch (err) {
202
+ let errorMessage;
203
+ switch (err.name) {
204
+ case 'NotAllowedError':
205
+ errorMessage = 'Microphone access denied. Please allow microphone access and try again.';
206
+ break;
207
+ case 'NotFoundError':
208
+ errorMessage = 'No microphone found. Please connect a microphone and try again.';
209
+ break;
210
+ case 'NotReadableError':
211
+ errorMessage = 'Microphone is being used by another application.';
212
+ break;
213
+ case 'OverconstrainedError':
214
+ errorMessage = 'Microphone constraints cannot be satisfied.';
215
+ break;
216
+ default:
217
+ errorMessage = `Microphone error: ${err.message}`;
218
+ }
219
+ throw new Error(errorMessage);
220
  }
221
+ },
222
+
223
+ async setupWebRTC() {
224
+ try {
225
+ // Close existing connection
226
+ if (AppState.rtcConnection) {
227
+ AppState.rtcConnection.close();
228
  }
229
+
230
+ const pc = new RTCPeerConnection({
231
+ iceServers: [
232
+ { urls: 'stun:stun.l.google.com:19302' },
233
+ { urls: 'stun:stun1.l.google.com:19302' }
234
+ ]
235
+ });
236
+
237
+ // Add audio track
238
+ if (AppState.mediaStream) {
239
+ AppState.mediaStream.getAudioTracks().forEach(track => {
240
+ pc.addTrack(track, AppState.mediaStream);
241
+ });
242
+ }
243
+
244
+ // Connect to signaling server
245
+ const signalWs = new WebSocket(window.RENDER_SIGNALING_URL);
246
+
247
+ // Handle signaling messages
248
+ signalWs.onmessage = async (event) => {
249
+ const message = Utils.safeJsonParse(event.data);
250
+ if (!message) return;
251
 
252
+ try {
253
+ if (message.type === 'offer') {
254
+ await pc.setRemoteDescription(new RTCSessionDescription(message));
255
+ const answer = await pc.createAnswer();
256
+ await pc.setLocalDescription(answer);
257
+ signalWs.send(JSON.stringify(pc.localDescription));
258
+ } else if (message.type === 'candidate' && message.candidate) {
259
  await pc.addIceCandidate(new RTCIceCandidate(message));
260
  }
261
+ } catch (err) {
262
+ console.error('Error handling signaling message:', err);
263
  }
264
+ };
265
+
266
+ // Send ICE candidates
267
+ pc.onicecandidate = (event) => {
268
+ if (event.candidate && signalWs.readyState === WebSocket.OPEN) {
269
+ signalWs.send(JSON.stringify({
270
+ type: 'candidate',
271
+ candidate: event.candidate
272
+ }));
273
+ }
274
+ };
275
 
276
+ // Handle connection state changes
277
  pc.onconnectionstatechange = () => {
278
+ console.log('WebRTC connection state:', pc.connectionState);
279
+ if (pc.connectionState === 'failed' || pc.connectionState === 'disconnected') {
280
+ StatusManager.update('warning', 'WebRTC connection lost');
 
 
 
 
281
  }
282
  };
283
 
284
+ AppState.rtcConnection = pc;
285
+
286
+ // Wait for connection with timeout
287
+ await new Promise((resolve, reject) => {
288
+ const timeout = setTimeout(() => {
289
+ reject(new Error("WebRTC connection timeout (15s)"));
290
+ }, 15000);
291
+
292
+ pc.onconnectionstatechange = () => {
293
+ if (pc.connectionState === 'connected') {
294
+ clearTimeout(timeout);
295
+ resolve();
296
+ } else if (pc.connectionState === 'failed') {
297
+ clearTimeout(timeout);
298
+ reject(new Error("WebRTC connection failed"));
299
+ }
300
+ };
301
+ });
302
+
303
+ } catch (err) {
304
+ console.error('WebRTC setup error:', err);
305
+ throw new Error(`WebRTC setup failed: ${err.message}`);
306
+ }
307
+ },
308
+
309
+ setupWebSocket() {
310
+ try {
311
+ // Close existing connection
312
+ if (AppState.wsConnection) {
313
+ AppState.wsConnection.close();
314
+ }
315
+
316
+ const wsUrl = window.RENDER_SIGNALING_URL.replace('/stream', '/ws_relay');
317
+ AppState.wsConnection = new WebSocket(wsUrl);
318
+
319
+ AppState.wsConnection.onopen = () => {
320
+ console.log('WebSocket connection established');
321
+ AppState.wsReconnectAttempts = 0; // Reset on successful connection
322
+ };
323
+
324
+ AppState.wsConnection.onmessage = (event) => {
325
+ MessageHandler.process(event.data);
326
+ };
327
+
328
+ AppState.wsConnection.onerror = (error) => {
329
+ console.error('WebSocket error:', error);
330
+ StatusManager.update('warning', 'WebSocket error');
331
+ };
332
+
333
+ AppState.wsConnection.onclose = () => {
334
+ console.log('WebSocket connection closed');
335
+
336
+ // Only attempt reconnection if not cleaning up and under limit
337
+ if (!AppState.isCleaningUp &&
338
+ AppState.wsReconnectAttempts < AppState.maxReconnectAttempts) {
339
+
340
+ AppState.wsReconnectAttempts++;
341
+ const delay = Math.min(3000 * AppState.wsReconnectAttempts, 30000); // Max 30s delay
342
+
343
+ console.log(`Attempting WebSocket reconnection ${AppState.wsReconnectAttempts}/${AppState.maxReconnectAttempts} in ${delay}ms`);
344
+
345
+ setTimeout(() => {
346
+ if (!AppState.isCleaningUp) {
347
+ this.setupWebSocket();
348
+ }
349
+ }, delay);
350
+ } else if (AppState.wsReconnectAttempts >= AppState.maxReconnectAttempts) {
351
+ StatusManager.update('error', 'Max WebSocket reconnection attempts reached');
352
  }
353
  };
354
+
355
+ } catch (err) {
356
+ console.error('WebSocket setup error:', err);
357
+ throw new Error(`WebSocket setup failed: ${err.message}`);
358
+ }
359
+ },
360
+
361
+ stop() {
362
+ AppState.isCleaningUp = true;
363
+ this.cleanup();
364
+ StatusManager.update('disconnected');
365
+ AppState.isCleaningUp = false;
366
+ },
367
+
368
+ cleanup() {
369
+ // Close WebRTC connection
370
+ if (AppState.rtcConnection) {
371
+ AppState.rtcConnection.close();
372
+ AppState.rtcConnection = null;
373
+ }
374
 
375
+ // Close WebSocket
376
+ if (AppState.wsConnection) {
377
+ AppState.wsConnection.close();
378
+ AppState.wsConnection = null;
379
+ }
380
 
381
+ // Stop media stream
382
+ if (AppState.mediaStream) {
383
+ AppState.mediaStream.getTracks().forEach(track => track.stop());
384
+ AppState.mediaStream = null;
385
  }
386
 
387
+ // Clear intervals
388
+ if (AppState.statusUpdateInterval) {
389
+ clearInterval(AppState.statusUpdateInterval);
390
+ AppState.statusUpdateInterval = null;
391
+ }
392
 
393
+ // Reset reconnection attempts
394
+ AppState.wsReconnectAttempts = 0;
395
+ }
396
+ };
397
+
398
+ // Message handling
399
+ const MessageHandler = {
400
+ process(data) {
401
+ try {
402
+ const message = Utils.safeJsonParse(data);
403
+
404
+ if (message) {
405
+ this.handleStructuredMessage(message);
406
+ } else {
407
+ // Fallback for plain HTML content
408
+ this.updateConversationDisplay(data);
409
  }
410
+
411
+ this.autoScroll();
412
+ } catch (e) {
413
+ console.error('Error processing message:', e);
414
+ this.updateConversationDisplay(data);
415
+ this.autoScroll();
416
+ }
417
+ },
418
+
419
+ handleStructuredMessage(message) {
420
+ switch(message.type) {
421
+ case 'transcription':
422
+ if (message.data && message.data.conversation_html) {
423
+ this.updateConversationDisplay(message.data.conversation_html);
424
+ }
425
+ break;
426
 
427
+ case 'processing_result':
428
+ this.handleProcessingResult(message.data);
429
+ break;
430
+
431
+ case 'connection':
432
+ StatusManager.update(message.status === 'connected' ? 'connected' : 'warning');
433
+ break;
434
+
435
+ case 'connection_established':
436
+ StatusManager.update('connected');
437
+ if (message.conversation) {
438
+ this.updateConversationDisplay(message.conversation);
439
  }
440
+ break;
441
 
442
+ case 'conversation_update':
443
+ if (message.conversation_html) {
444
+ this.updateConversationDisplay(message.conversation_html);
445
+ }
446
+ break;
 
447
 
448
+ case 'conversation_cleared':
449
+ this.updateConversationDisplay("<i>Conversation cleared. Start speaking again...</i>");
450
+ break;
451
+
452
+ case 'error':
453
+ console.error('Server error:', message.message);
454
+ StatusManager.update('warning', message.message);
455
+ break;
456
+
457
+ default:
458
+ console.log('Unknown message type:', message.type);
459
+ }
460
+ },
461
+
462
+ handleProcessingResult(data) {
463
+ if (!data) return;
464
 
465
+ if (data.status === "processed" && data.speaker_id !== undefined) {
466
+ const statusElem = document.getElementById('status-text');
467
+ if (statusElem) {
468
+ const speakerId = `Speaker ${data.speaker_id + 1}`;
469
+ statusElem.textContent = `Connected - ${speakerId} active`;
 
 
 
 
 
 
470
  }
471
+ } else if (data.status === "error") {
472
+ StatusManager.update('error', data.message || 'Processing error');
473
+ }
474
+ },
475
+
476
+ updateConversationDisplay(content) {
477
+ const container = document.getElementById("conversation");
478
+ if (container) {
479
+ container.innerHTML = content;
480
+ }
481
+ },
482
+
483
+ autoScroll() {
484
+ const container = document.getElementById("conversation");
485
+ if (container) {
486
+ container.scrollTop = container.scrollHeight;
487
+ }
488
  }
489
+ };
490
 
491
+ // Connection monitoring
492
+ const ConnectionMonitor = {
493
+ async updateConnectionInfo() {
494
+ try {
495
+ const hfConnected = await Utils.checkHfConnection(3000);
496
+
497
+ if (!hfConnected) {
498
+ StatusManager.update('warning', 'Backend unavailable');
 
 
 
 
 
 
 
499
 
500
+ // Try to reconnect WebSocket only if not already trying
501
+ if (!AppState.wsConnection || AppState.wsConnection.readyState !== WebSocket.OPEN) {
502
+ if (AppState.wsReconnectAttempts < AppState.maxReconnectAttempts) {
503
+ StreamController.setupWebSocket();
 
 
 
 
504
  }
 
 
 
 
 
505
  }
506
+ } else if (AppState.rtcConnection?.connectionState === 'connected' ||
507
+ AppState.rtcConnection?.iceConnectionState === 'connected') {
508
+ StatusManager.update('connected');
509
  } else {
510
+ StatusManager.update('warning', 'Connection unstable');
511
  }
512
+ } catch (err) {
513
+ console.error('Error updating connection info:', err);
514
  }
 
 
 
515
  }
516
+ };
517
 
518
+ // Status management
519
+ const StatusManager = {
520
+ update(status, message = '') {
521
+ const statusText = document.getElementById('status-text');
522
+ const statusIcon = document.getElementById('status-icon');
523
+
524
+ if (!statusText || !statusIcon) return;
525
+
526
+ switch(status) {
527
+ case 'connected':
528
+ statusText.textContent = message || 'Connected';
529
+ statusIcon.style.backgroundColor = '#4CAF50';
530
+ break;
531
+ case 'connecting':
532
+ statusText.textContent = 'Connecting...';
533
+ statusIcon.style.backgroundColor = '#FFC107';
534
+ break;
535
+ case 'disconnected':
536
+ statusText.textContent = 'Disconnected';
537
+ statusIcon.style.backgroundColor = '#9E9E9E';
538
+ break;
539
+ case 'error':
540
+ statusText.textContent = `Error: ${message}`;
541
+ statusIcon.style.backgroundColor = '#F44336';
542
+ break;
543
+ case 'warning':
544
+ statusText.textContent = `Warning: ${message}`;
545
+ statusIcon.style.backgroundColor = '#FF9800';
546
+ break;
547
+ default:
548
+ statusText.textContent = 'Unknown';
549
+ statusIcon.style.backgroundColor = '#9E9E9E';
550
+ }
551
  }
552
+ };
 
 
 
553
 
554
+ // API functions
555
+ const ApiManager = {
556
+ async clearConversation() {
557
+ // Update UI immediately
558
+ document.getElementById("conversation").innerHTML =
559
+ "<i>Conversation cleared. Start speaking again...</i>";
560
+
561
+ // Try backend API
562
+ try {
563
+ const isConnected = await Utils.checkHfConnection();
564
  if (isConnected) {
565
+ const response = await fetch(`${window.HF_SPACE_URL}/clear`, {
566
+ method: 'POST',
567
+ headers: {
568
+ 'Content-Type': 'application/json'
569
+ }
570
  });
571
+
572
+ if (!response.ok) {
573
+ throw new Error(`HTTP ${response.status}`);
574
+ }
575
+
576
+ console.log("Backend conversation cleared successfully");
577
  }
578
+ } catch (err) {
 
 
 
 
 
579
  console.warn("Backend clear API failed:", err);
580
+ }
581
+ },
582
 
583
+ async updateSettings() {
584
+ const threshold = document.querySelector('input[data-testid="threshold-slider"]')?.value || 0.7;
585
+ const maxSpeakers = document.querySelector('input[data-testid="speakers-slider"]')?.value || 4;
586
+
587
+ // Update UI immediately
588
+ const statusOutput = document.getElementById('status-output');
589
+ if (statusOutput) {
590
+ statusOutput.innerHTML = `
591
+ <h2>System Status</h2>
592
+ <p>Settings updated:</p>
593
+ <ul>
594
+ <li>Threshold: ${threshold}</li>
595
+ <li>Max Speakers: ${maxSpeakers}</li>
596
+ </ul>
597
+ <p>Transcription Models:</p>
598
+ <ul>
599
+ <li>Final: ${window.FINAL_TRANSCRIPTION_MODEL || "distil-large-v3"}</li>
600
+ <li>Realtime: ${window.REALTIME_TRANSCRIPTION_MODEL || "distil-small.en"}</li>
601
+ </ul>
602
+ `;
603
+ }
604
+
605
+ // Try backend API
606
+ try {
607
+ const isConnected = await Utils.checkHfConnection();
608
  if (isConnected) {
609
+ const response = await fetch(
610
+ `${window.HF_SPACE_URL}/settings?threshold=${threshold}&max_speakers=${maxSpeakers}`,
611
+ {
612
+ method: 'POST',
613
+ headers: {
614
+ 'Content-Type': 'application/json'
615
+ }
616
+ }
617
+ );
618
+
619
+ if (!response.ok) {
620
+ throw new Error(`HTTP ${response.status}`);
621
+ }
622
+
623
+ console.log("Backend settings updated successfully");
624
  }
625
+ } catch (err) {
 
 
 
 
 
626
  console.warn("Backend settings update failed:", err);
627
+ }
 
628
  }
629
+ };
630
+
631
+ // DOM initialization
632
+ const DOMManager = {
633
+ init() {
634
+ StatusManager.update('disconnected');
 
 
 
 
 
 
 
635
 
636
+ // Use MutationObserver for reliable button detection
637
+ const observer = new MutationObserver(Utils.debounce(() => {
638
+ if (this.bindButtons()) {
639
+ observer.disconnect();
640
+ }
641
+ }, 100));
642
 
643
+ observer.observe(document.body, {
644
+ childList: true,
645
+ subtree: true
646
+ });
 
647
 
648
+ // Fallback: try binding immediately
649
+ this.bindButtons();
650
 
651
+ // Cleanup on page unload
652
+ window.addEventListener('beforeunload', () => {
653
+ StreamController.cleanup();
654
+ });
655
+ },
656
+
657
+ bindButtons() {
658
+ const buttons = {
659
+ start: document.getElementById('btn-start') ||
660
+ document.querySelector('button[aria-label="Start Listening"]'),
661
+ stop: document.getElementById('btn-stop') ||
662
+ document.querySelector('button[aria-label="Stop"]'),
663
+ clear: document.getElementById('btn-clear') ||
664
+ document.querySelector('button[aria-label="Clear"]'),
665
+ update: document.getElementById('btn-update') ||
666
+ document.querySelector('button[aria-label="Update Settings"]')
667
+ };
668
+
669
+ const allFound = Object.values(buttons).every(btn => btn !== null);
670
+
671
+ if (allFound) {
672
+ // Remove existing listeners to prevent duplicates
673
+ Object.values(buttons).forEach(btn => {
674
+ if (btn.dataset.bound !== 'true') {
675
+ btn.onclick = null;
676
+ }
677
+ });
678
 
679
+ // Bind new listeners
680
+ buttons.start.onclick = () => StreamController.start();
681
+ buttons.stop.onclick = () => StreamController.stop();
682
+ buttons.clear.onclick = () => ApiManager.clearConversation();
683
+ buttons.update.onclick = () => ApiManager.updateSettings();
684
 
685
+ // Mark as bound
686
+ Object.values(buttons).forEach(btn => {
687
+ btn.dataset.bound = 'true';
688
+ });
 
689
 
690
+ console.log("All buttons bound successfully");
691
  return true;
 
 
 
692
  }
 
 
 
 
 
 
 
 
693
 
694
+ return false;
 
 
 
 
 
 
 
695
  }
696
+ };
697
+
698
+ // Initialize when DOM is ready
699
+ if (document.readyState === 'loading') {
700
+ document.addEventListener('DOMContentLoaded', () => DOMManager.init());
701
+ } else {
702
+ DOMManager.init();
703
+ }
704
  </script>
705
  """,
706
  label="Live Conversation"
 
768
  - 🟠 Speaker 8 (Gold)
769
  """)
770
 
771
+ # Set up periodic status updates with proper error handling
772
  def get_status():
773
  """API call to get system status - called periodically"""
 
774
  try:
775
+ resp = requests.get(f"{HF_SPACE_URL}/status", timeout=5)
 
776
  if resp.status_code == 200:
777
+ data = resp.json()
778
+ return data.get('status', 'No status information')
779
+ return f"HTTP {resp.status_code}"
780
+ except requests.exceptions.Timeout:
781
+ return "Connection timeout"
782
+ except requests.exceptions.ConnectionError:
783
+ return "Connection error - backend unavailable"
784
  except Exception as e:
785
+ return f"Error: {str(e)}"
786
 
787
+ # Create timer and add to resource manager
788
+ status_timer = gr.Timer(5)
789
  status_timer.tick(fn=get_status, outputs=status_output)
790
+ resource_manager.add_timer(status_timer)
791
 
792
  return demo
793
 
 
798
  """Mount Gradio app to FastAPI"""
799
  app.mount("/ui", demo.app)
800
 
801
+ def cleanup_resources():
802
+ """Cleanup function to be called on app shutdown"""
803
+ resource_manager.cleanup()
804
+
805
  # For standalone testing
806
  if __name__ == "__main__":
807
+ try:
808
+ demo.launch(
809
+ share=False,
810
+ debug=False,
811
+ show_error=True,
812
+ server_name="0.0.0.0",
813
+ server_port=7860
814
+ )
815
+ except KeyboardInterrupt:
816
+ print("\nShutting down...")
817
+ cleanup_resources()
818
+ except Exception as e:
819
+ print(f"Error launching demo: {e}")
820
+ cleanup_resources()
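
Note: in this commit, cleanup_resources() is only invoked in the standalone __main__ branch above. A minimal sketch, assuming ui.py is mounted into a separate host FastAPI module (the host module and wiring below are hypothetical, not part of this commit), of how the same cleanup hook could also run on server shutdown:

# host_app.py: hypothetical host module, not part of this commit.
# Sketch only: mount the Gradio UI and run the ResourceManager cleanup
# when the FastAPI server shuts down, instead of only on Ctrl-C.
from fastapi import FastAPI

from ui import mount_ui, cleanup_resources  # names introduced by this commit

app = FastAPI()
mount_ui(app)  # mount_ui() mounts the Gradio Blocks app under /ui

@app.on_event("shutdown")
def shutdown_ui() -> None:
    # Stops the gr.Timer registered with resource_manager and runs any
    # queued cleanup callbacks, mirroring the KeyboardInterrupt path above.
    cleanup_resources()

Newer FastAPI releases prefer a lifespan context over on_event, but the idea is the same: resource_manager cleanup is tied to server shutdown rather than only to the standalone launch path.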