mac9087 committed
Commit f2606ca · verified · 1 Parent(s): 59e224e

Update app.py

Files changed (1)
  1. app.py +9 -2
app.py CHANGED
@@ -1,3 +1,4 @@
+
 import os
 import torch
 import time
@@ -15,7 +16,10 @@ from flask_cors import CORS
 import numpy as np
 import trimesh
 import cv2
-from lgm.models import LGM
+try:
+    from lgm.models import LGM
+except ImportError:
+    LGM = None
 
 os.environ["CUDA_VISIBLE_DEVICES"] = ""
 torch.set_default_device("cpu")
@@ -133,6 +137,9 @@ def load_model():
         else:
             raise
 
+    if LGM is None:
+        raise ImportError("LGM module not available. Ensure lgm is installed from https://github.com/baaivision/LGM.")
+
     lgm_model = LGM.from_pretrained(
         model_name,
         cache_dir=CACHE_DIR,
@@ -197,7 +204,7 @@ def progress(job_id):
 
        yield f"data: {json.dumps({'status': 'processing', 'progress': job['progress']})}\n\n"
 
-        last_progress = job['progress']
+        last_progress = job['progress']
        check_count = 0
        while job['status'] == 'processing':
            if job['progress'] != last_progress:
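The import change follows a common optional-dependency pattern: try the import at module load, fall back to None, and raise an actionable error only when the model is actually needed. A minimal sketch of the resulting flow in Python; model_name and CACHE_DIR are placeholder values here, since the real ones are defined elsewhere in app.py and are not part of this diff.

# Guarded import: the app can still start when the optional dependency is absent.
try:
    from lgm.models import LGM
except ImportError:
    LGM = None

# Illustrative placeholders; the actual values live elsewhere in app.py.
CACHE_DIR = "./model_cache"
model_name = "your-lgm-checkpoint"  # hypothetical identifier, not taken from the diff

def load_model():
    # Defer the failure to the point of use, with an actionable message.
    if LGM is None:
        raise ImportError(
            "LGM module not available. "
            "Ensure lgm is installed from https://github.com/baaivision/LGM."
        )
    return LGM.from_pretrained(model_name, cache_dir=CACHE_DIR)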
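The second hunk sits inside the app's Server-Sent Events progress endpoint, which emits a new event only when the job's progress value changes. A rough sketch of that polling loop, assuming a Flask Response and an in-memory jobs dict; only the yield line and the last_progress comparison come from the diff, the rest is an assumed surrounding structure.

import json
import time
from flask import Response

jobs = {}  # assumed shape: jobs[job_id] = {'status': 'processing', 'progress': 0}

def progress_stream(job_id):
    def generate():
        job = jobs[job_id]
        # Initial snapshot, then re-emit only when progress actually changes.
        yield f"data: {json.dumps({'status': 'processing', 'progress': job['progress']})}\n\n"
        last_progress = job['progress']
        while job['status'] == 'processing':
            if job['progress'] != last_progress:
                yield f"data: {json.dumps({'status': 'processing', 'progress': job['progress']})}\n\n"
                last_progress = job['progress']
            time.sleep(0.5)  # avoid a busy loop while another worker updates the job
        yield f"data: {json.dumps({'status': job['status']})}\n\n"
    return Response(generate(), mimetype='text/event-stream')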