Abe committed on
Commit ef6b90e · Parent(s): f6ee34b

initial commit

Files changed (3)
  1. .gitignore +68 -0
  2. app.py +539 -0
  3. requirements.txt +3 -0
.gitignore ADDED
@@ -0,0 +1,68 @@
+ # Reference files (not to be checked in)
+ bulk_import.py
+
+ # Python
+ __pycache__/
+ *.py[cod]
+ *$py.class
+ *.so
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ pip-wheel-metadata/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # Virtual environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # IDE
+ .idea/
+ .vscode/
+ *.swp
+ *.swo
+ *~
+
+ # OS
+ .DS_Store
+ .DS_Store?
+ ._*
+ .Spotlight-V100
+ .Trashes
+ ehthumbs.db
+ Thumbs.db
+
+ # Temporary files
+ *.tmp
+ *.temp
+ temp/
+ tmp/
+
+ # Logs
+ *.log
+ logs/
+
+ # Environment variables
+ .env.local
+ .env.development.local
+ .env.test.local
+ .env.production.local
app.py ADDED
@@ -0,0 +1,539 @@
+ #!/usr/bin/env python3
+ import gradio as gr
+ import os
+ import hashlib
+ import traceback
+ import zipfile
+ import tempfile
+ import shutil
+ from pathlib import Path
+ import requests
+ import threading
+ from typing import List, Dict, Optional
+ from dataclasses import dataclass
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+
+ # Global state for progress tracking; guard all access with upload_lock
+ upload_progress = {"current": 0, "total": 0, "status": "", "files_processed": [], "errors": [], "final_summary": ""}
+ upload_lock = threading.Lock()
+
+ @dataclass
+ class ProcessResult:
+     filename: str
+     status: str  # "skipped", "uploaded", or "error"
+     message: str
+     file_hash: Optional[str] = None
+
+ def calculate_sha256(filepath: Path) -> str:
+     """Calculate SHA256 hash of a file"""
+     sha256_hash = hashlib.sha256()
+     with open(filepath, "rb") as f:
+         # Read in 100MB chunks so multi-GB files never sit fully in memory
+         for byte_block in iter(lambda: f.read(104857600), b""):
+             sha256_hash.update(byte_block)
+     return sha256_hash.hexdigest()
+
+ def check_hash_exists(file_hash: str) -> bool:
+     """Check if file hash already exists on datadrones.com"""
+     try:
+         hash_request = requests.get(
+             f"https://dl.datadrones.com/api/model/sha256sum/{file_hash}",
+             timeout=10
+         )
+         return hash_request.status_code == 200
+     except Exception as e:
+         print(f"Error checking hash existence: {e}")
+         return False
+
+ def find_by_hash(file_hash: str) -> Optional[Dict]:
+     """Find metadata by hash from Civitai and other sources"""
+     # Get Civitai API key from environment variable (HuggingFace Spaces secret)
+     civitai_api_key = os.getenv("CIVITAI_API_KEY")
+
+     header = {
+         "Content-Type": "application/json",
+     }
+
+     # Only add Authorization header if API key is available
+     if civitai_api_key:
+         header["Authorization"] = f"Bearer {civitai_api_key}"
+     else:
+         print("⚠️ Warning: CIVITAI_API_KEY not found in environment variables")
+
+     print(f"Retrieving metadata by hash {file_hash}")
+
+     # Try Civitai first
+     try:
+         response = requests.get(
+             f"https://civitai.com/api/v1/model-versions/by-hash/{file_hash}",
+             headers=header,
+             timeout=15
+         )
+         if response.status_code == 200:
+             civitai_data = {"civitai": response.json()}
+             return civitai_data
+     except Exception as e:
+         print(f"Civitai API error: {e}")
+
+     # Try civitaiarchive as fallback
+     try:
+         response = requests.get(f"https://civitaiarchive.com/api/sha256/{file_hash}", timeout=15)
+         if response.status_code == 200:
+             civitai_data = {"civitai": response.json()}
+             return civitai_data
+     except Exception as e:
+         print(f"CivitaiArchive API error: {e}")
+
+     return None
+
+ def submit_to_datadrones(model_path: Path, metadata: Dict) -> bool:
+     """Submit file to datadrones.com"""
+     try:
+         print(f"🚀 Starting upload of {model_path.name} to datadrones.com...")
+
+         # Extract metadata fields
+         description = ""
+         model_name = None
+         base_model = None
+         tags = None
+         model_type = None
+         is_nsfw = False
+
+         # Process metadata structure
+         model_name = (metadata.get("model_name") or
+                       metadata.get("civitai", {}).get("name") or
+                       metadata.get("name"))
+
+         civitai = metadata.get("civitai", {})
+
+         if civitai:
+             is_nsfw = civitai.get("nsfw", False)
+
+             # Handle model versions
+             if "modelVersions" in civitai:
+                 model_versions = civitai.get("modelVersions")
+                 if model_versions:
+                     base_model = model_versions[0].get("baseModel")
+
+             # Handle direct model data
+             if "model" in civitai:
+                 model = civitai["model"]
+                 model_type = model.get("type")
+                 is_nsfw = model.get("nsfw", False)
+                 model_name = model.get("name")
+                 model_description = model.get("description")
+                 tags = model.get("tags")
+                 if model_description:
+                     description += f"{model_description}\n"
+
+             if "type" in civitai:
+                 model_type = civitai.get("type")
+
+             if "baseModel" in civitai:
+                 base_model = civitai.get("baseModel")
+                 if base_model == "Hunyuan Video":
+                     base_model = "HunyuanVideo"
+
+             if "description" in civitai:
+                 description += f"{civitai['description']}\n"
+
+         if model_name:
+             description = f"{model_name}\n{description}"
+
+         if not description.strip():
+             description = "Model uploaded via bulk uploader"
+
+         # Handle tags
+         if not tags and metadata.get("tags"):
+             tags = ",".join(metadata.get("tags", []))
+         elif isinstance(tags, list):
+             tags = ",".join(tags)
+
+         # Prepare form data
+         data = {
+             "description": description.strip(),
+             "base_model": base_model if base_model else "Other",
+             "tags": tags if tags else "",
+             "model_type": model_type if model_type else "LoRA",
+             "is_nsfw": is_nsfw,
+         }
+
+         print(f"📋 Upload data for {model_path.name}:")
+         print(f"   - Model name: {model_name}")
+         print(f"   - Model type: {data['model_type']}")
+         print(f"   - Base model: {data['base_model']}")
+         print(f"   - NSFW: {data['is_nsfw']}")
+         print(f"   - Tags: {data['tags']}")
+         print(f"   - Description length: {len(data['description'])} chars")
+         print(f"   - File size: {model_path.stat().st_size / (1024*1024):.1f} MB")
+
+         with open(model_path, "rb") as f:
+             files = {"file": f}
+             headers = {'Host': 'up.datadrones.com'}
+
+             print(f"🌐 Making POST request to https://up.datadrones.com/upload for {model_path.name}...")
+
+             response = requests.post(
+                 "https://up.datadrones.com/upload",
+                 files=files,
+                 data=data,
+                 headers=headers,
+                 timeout=300  # 5 minute timeout for large files
+             )
+
+         print(f"📡 Response for {model_path.name}:")
+         print(f"   - Status code: {response.status_code}")
+         print(f"   - Response headers: {dict(response.headers)}")
+         print(f"   - Response text (first 500 chars): {response.text[:500]}")
+
+         success = response.status_code == 200
+         if success:
+             print(f"✅ Upload successful for {model_path.name}")
+         else:
+             print(f"❌ Upload failed for {model_path.name} - Status: {response.status_code}")
+             print(f"   - Full response: {response.text}")
+
+         return success
+
+     except Exception as e:
+         print(f"💥 Exception during upload of {model_path.name}: {e}")
+         traceback.print_exc()
+         return False
+
+ def extract_model_files(uploaded_files: List) -> List[Path]:
+     """Extract model files from uploaded files, handling both direct files and zip archives"""
+     model_files = []
+     temp_dir = Path(tempfile.mkdtemp())
+
+     # Supported model file extensions
+     supported_extensions = {'.safetensors', '.pt', '.bin'}
+
+     for file_info in uploaded_files:
+         file_path = Path(file_info.name)
+
+         if file_path.suffix.lower() in supported_extensions:
+             # Direct model file
+             dest_path = temp_dir / file_path.name
+             shutil.copy2(file_path, dest_path)
+             model_files.append(dest_path)
+
+         elif file_path.suffix.lower() == '.zip':
+             # Extract each archive into its own subdirectory so the glob below
+             # does not re-collect files that earlier iterations already added
+             try:
+                 extract_dir = Path(tempfile.mkdtemp(dir=temp_dir))
+                 with zipfile.ZipFile(file_path, 'r') as zip_ref:
+                     zip_ref.extractall(extract_dir)
+
+                 # Find all model files in the extracted content
+                 for extension in supported_extensions:
+                     model_files.extend(extract_dir.rglob(f"*{extension}"))
+
+             except Exception as e:
+                 print(f"Error extracting {file_path}: {e}")
+
+     return model_files
+
+ def process_single_file(model_file: Path) -> ProcessResult:
+     """Process a single model file"""
+     try:
+         print(f"\n🔍 Processing file: {model_file.name}")
+
+         # Check file size (skip if over 4GB)
+         file_size = model_file.stat().st_size
+         if file_size > 4 * 1024 * 1024 * 1024:  # 4GB
+             print(f"⏭️ Skipping {model_file.name} - over 4GB limit")
+             return ProcessResult(
+                 filename=model_file.name,
+                 status="skipped",
+                 message="File over 4GB size limit"
+             )
+
+         # Calculate hash
+         print(f"🔢 Calculating hash for {model_file.name}...")
+         file_hash = calculate_sha256(model_file)
+         print(f"📝 Hash: {file_hash}")
+
+         # Check if already exists in datadrones
+         print(f"🔍 Checking if {file_hash} already exists on datadrones.com...")
+         if check_hash_exists(file_hash):
+             print(f"⏭️ File {model_file.name} already exists on datadrones.com")
+             return ProcessResult(
+                 filename=model_file.name,
+                 status="skipped",
+                 message="Already exists on datadrones.com",
+                 file_hash=file_hash
+             )
+
+         # Find metadata by hash
+         print(f"🔍 Looking up metadata for {file_hash}...")
+         metadata = find_by_hash(file_hash)
+
+         if not metadata:
+             print(f"❌ No metadata found for {model_file.name}")
+             return ProcessResult(
+                 filename=model_file.name,
+                 status="error",
+                 message="No metadata found for this file",
+                 file_hash=file_hash
+             )
+
+         print(f"✅ Found metadata for {model_file.name}")
+
+         # Submit to datadrones
+         print(f"🚀 Attempting upload of {model_file.name} to datadrones.com...")
+         if submit_to_datadrones(model_file, metadata):
+             print(f"✅ Successfully uploaded {model_file.name} to datadrones.com")
+             return ProcessResult(
+                 filename=model_file.name,
+                 status="uploaded",
+                 message="Successfully uploaded to datadrones.com",
+                 file_hash=file_hash
+             )
+         else:
+             print(f"❌ Failed to upload {model_file.name} to datadrones.com")
+             return ProcessResult(
+                 filename=model_file.name,
+                 status="error",
+                 message="Failed to upload to datadrones.com",
+                 file_hash=file_hash
+             )
+
+     except Exception as e:
+         print(f"💥 Error processing {model_file.name}: {e}")
+         traceback.print_exc()
+         return ProcessResult(
+             filename=model_file.name,
+             status="error",
+             message=f"Processing error: {str(e)}"
+         )
+
+ def update_progress(current: int, total: int, status: str, file_result: Optional[ProcessResult] = None, final_summary: Optional[str] = None):
+     """Update global progress tracking"""
+     with upload_lock:
+         upload_progress["current"] = current
+         upload_progress["total"] = total
+         upload_progress["status"] = status
+
+         # Store final summary when processing is complete
+         if final_summary:
+             upload_progress["final_summary"] = final_summary
+
+         if file_result:
+             upload_progress["files_processed"].append({
+                 "filename": file_result.filename,
+                 "status": file_result.status,
+                 "message": file_result.message,
+                 "hash": file_result.file_hash
+             })
+
+             if file_result.status == "error":
+                 upload_progress["errors"].append(f"{file_result.filename}: {file_result.message}")
+
+ def process_files_async(uploaded_files: List) -> str:
+     """Process uploaded files asynchronously"""
+     try:
+         print("\n🎬 Starting bulk upload process...")
+
+         # Reset progress
+         with upload_lock:
+             upload_progress.update({
+                 "current": 0,
+                 "total": 0,
+                 "status": "Extracting files...",
+                 "files_processed": [],
+                 "errors": [],
+                 "final_summary": ""
+             })
+
+         # Extract model files
+         print("📦 Extracting model files from uploaded content...")
+         model_files = extract_model_files(uploaded_files)
+         total_files = len(model_files)
+
+         print(f"📋 Found {total_files} model files to process")
+         for i, file in enumerate(model_files, 1):
+             print(f"   {i}. {file.name} ({file.stat().st_size / (1024*1024):.1f} MB)")
+
+         if total_files == 0:
+             print("❌ No supported model files found")
+             return "No supported model files (.safetensors, .pt, .bin) found in uploaded content."
+
+         update_progress(0, total_files, "Processing files...")
+
+         # Process files with a thread pool for better throughput
+         print(f"🔄 Processing {total_files} files with ThreadPoolExecutor...")
+         results = []
+         with ThreadPoolExecutor(max_workers=3) as executor:
+             future_to_file = {
+                 executor.submit(process_single_file, file): file
+                 for file in model_files
+             }
+
+             for i, future in enumerate(as_completed(future_to_file), 1):
+                 result = future.result()
+                 results.append(result)
+                 print(f"📊 Completed {i}/{total_files}: {result.filename} -> {result.status}")
+                 update_progress(i, total_files, f"Processed {i}/{total_files} files", result)
+
+         # Generate summary
+         uploaded_count = sum(1 for r in results if r.status == "uploaded")
+         skipped_count = sum(1 for r in results if r.status == "skipped")
+         error_count = sum(1 for r in results if r.status == "error")
+
+         summary = f"""Processing Complete!
+
+ Total files: {total_files}
+ ✅ Uploaded: {uploaded_count}
+ ⏭️ Skipped: {skipped_count}
+ ❌ Errors: {error_count}"""
+
+         # Update progress with final summary
+         update_progress(total_files, total_files, "Complete", None, summary)
+
+         print(f"🎉 Bulk upload completed: {uploaded_count} uploaded, {skipped_count} skipped, {error_count} errors")
+
+         # Cleanup temp files
+         print("🧹 Cleaning up temporary files...")
+         for file in model_files:
+             try:
+                 if file.exists():
+                     file.unlink()
+                 # Also clean up the parent temp directory if it is now empty
+                 parent = file.parent
+                 if parent.exists() and not any(parent.iterdir()):
+                     parent.rmdir()
+             except Exception:
+                 pass
+
+         return summary
+
+     except Exception as e:
+         error_msg = f"Processing failed: {str(e)}"
+         print(f"💥 Bulk processing failed: {e}")
+         traceback.print_exc()
+         # Report as a 1/1 progress state so the failure summary is shown;
+         # a total of 0 reads as "No active uploads" in get_progress_update
+         update_progress(1, 1, error_msg, None, error_msg)
+         return error_msg
+
+ def get_progress_update():
+     """Get current progress status"""
+     with upload_lock:
+         if upload_progress["total"] == 0:
+             return "No active uploads", ""
+
+         current = upload_progress["current"]
+         total = upload_progress["total"]
+         status = upload_progress["status"]
+
+         # Show the final summary once processing is complete
+         if current == total and total > 0 and upload_progress["final_summary"]:
+             progress_text = upload_progress["final_summary"]
+         else:
+             progress_text = f"Progress: {current}/{total} - {status}"
+
+         # Build detailed log
+         log_lines = []
+         for file_info in upload_progress["files_processed"][-10:]:  # Show last 10
+             status_emoji = {"uploaded": "✅", "skipped": "⏭️", "error": "❌"}.get(file_info["status"], "?")
+             log_lines.append(f"{status_emoji} {file_info['filename']}: {file_info['message']}")
+
+         if upload_progress["errors"]:
+             log_lines.append(f"\nRecent Errors ({len(upload_progress['errors'])}):")
+             log_lines.extend(upload_progress["errors"][-5:])  # Show last 5 errors
+
+         detailed_log = "\n".join(log_lines)
+
+         return progress_text, detailed_log
+
+ def start_upload(files):
+     """Start the upload process in a separate thread"""
+     if not files:
+         return "No files selected", ""
+
+     # Start processing in a background daemon thread
+     thread = threading.Thread(target=process_files_async, args=(files,), daemon=True)
+     thread.start()
+
+     return "Upload started! Check progress below...", ""
+
+ # Create Gradio interface
+ def create_interface():
+     with gr.Blocks(title="DataDrones Bulk Uploader", theme=gr.themes.Soft()) as iface:
+         gr.Markdown("""
+         # 🚁 DataDrones Bulk Uploader
+
+         Upload multiple model files (`.safetensors`, `.pt`, `.bin`) or zip archives containing model files to datadrones.com.
+
+         **Features:**
+         - Supports direct model file uploads (.safetensors, .pt, .bin) and zip archives
+         - Automatic hash checking to avoid duplicates
+         - Metadata retrieval from Civitai and other sources
+         - Real-time progress tracking
+         - Concurrent processing for faster uploads
+         """)
+
+         with gr.Row():
+             with gr.Column(scale=2):
+                 file_input = gr.File(
+                     label="Select model files (.safetensors, .pt, .bin) or .zip archives",
+                     file_count="multiple",
+                     file_types=[".safetensors", ".pt", ".bin", ".zip"]
+                 )
+
+                 upload_btn = gr.Button("🚀 Start Upload", variant="primary", size="lg")
+
+             with gr.Column(scale=1):
+                 gr.Markdown("""
+                 ### Instructions:
+                 1. Select model files (`.safetensors`, `.pt`, `.bin`) directly, or upload `.zip` archives containing model files
+                 2. Click "Start Upload" to begin processing
+                 3. Monitor progress in real time below
+
+                 **Note:** Files over 4GB will be skipped.
+                 """)
+
+         gr.Markdown("---")
+
+         with gr.Row():
+             with gr.Column():
+                 progress_display = gr.Textbox(
+                     label="Upload Progress",
+                     value="Ready to upload",
+                     interactive=False
+                 )
+
+                 refresh_btn = gr.Button("🔄 Refresh Progress", size="sm")
+
+                 detailed_log = gr.Textbox(
+                     label="Detailed Log",
+                     value="",
+                     lines=15,
+                     interactive=False
+                 )
+
+         # Set up event handlers
+         upload_btn.click(
+             fn=start_upload,
+             inputs=[file_input],
+             outputs=[progress_display, detailed_log]
+         )
+
+         # Manual refresh for progress updates
+         refresh_btn.click(
+             fn=get_progress_update,
+             outputs=[progress_display, detailed_log]
+         )
+
+     return iface
+
+ if __name__ == "__main__":
+     app = create_interface()
+     app.queue(max_size=10)  # Enable request queuing (processing itself runs in a background thread)
+     app.launch(
+         server_name="0.0.0.0",
+         server_port=7860,
+         share=False
+     )
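The hash-then-dedup flow in `process_single_file` can be exercised without launching the UI. Below is a minimal sketch that hashes one local file the same chunked way `calculate_sha256` does and queries the same `sha256sum` endpoint `check_hash_exists` uses; the filename `model.safetensors` is a placeholder, not a file from this repo.

```python
#!/usr/bin/env python3
# Minimal sketch: replicate the app's duplicate check for one local file.
# "model.safetensors" is a hypothetical placeholder path.
import hashlib
from pathlib import Path

import requests


def sha256_of(path: Path) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        # Smaller 1 MB chunks than app.py's 100 MB; either keeps memory bounded
        for block in iter(lambda: f.read(1 << 20), b""):
            h.update(block)
    return h.hexdigest()


if __name__ == "__main__":
    digest = sha256_of(Path("model.safetensors"))  # placeholder filename
    resp = requests.get(
        f"https://dl.datadrones.com/api/model/sha256sum/{digest}", timeout=10
    )
    # app.py treats HTTP 200 as "already uploaded"
    print("duplicate" if resp.status_code == 200 else "not found", digest)
```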
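The progress display relies on a manual refresh button. If automatic polling is preferred, Gradio 5 (which requirements.txt pins) ships a `gr.Timer` component whose `tick` event can drive the same callback. A hedged sketch, assuming `app.py` sits in the same directory:

```python
# Sketch: auto-poll progress every 2 seconds via gr.Timer instead of a
# manual refresh button. Reuses get_progress_update from app.py.
import gradio as gr

from app import get_progress_update

with gr.Blocks() as demo:
    progress_display = gr.Textbox(label="Upload Progress", interactive=False)
    detailed_log = gr.Textbox(label="Detailed Log", lines=15, interactive=False)

    timer = gr.Timer(2.0)  # fires every 2 seconds while a client is connected
    timer.tick(fn=get_progress_update, outputs=[progress_display, detailed_log])

if __name__ == "__main__":
    demo.launch()
```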
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ gradio>=5.0.0
+ requests>=2.28.0
+ # pathlib is in the standard library since Python 3.4; do not install the PyPI backport
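Because `pathlib` is part of the standard library, the original `pathlib` requirement is replaced with a comment above; installing the abandoned PyPI backport can shadow the stdlib module on some setups. A quick sanity check that the import resolves correctly:

```python
# Sanity check: pathlib should resolve to the standard library, not a
# site-packages backport left over from an old "pip install pathlib".
import pathlib

location = getattr(pathlib, "__file__", "")
print(location)  # expect a path inside the Python installation
assert "site-packages" not in location, "a PyPI pathlib backport is shadowing the stdlib"
```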