Update app.py
app.py
CHANGED
@@ -9,10 +9,16 @@ import uuid
 import torch
 from PIL import Image
 
-# Force CPU usage
+# Force CPU usage and disable CUDA completely
 torch.backends.cudnn.enabled = False
+os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
 torch.cuda.is_available = lambda: False
-
+torch.cuda.device_count = lambda: 0
+torch.cuda.get_device_name = lambda x: 'cpu'
+torch.cuda.current_device = lambda: 0
+torch.cuda.set_device = lambda x: None
+torch.Tensor.cuda = lambda self, device=None: self
+torch.nn.Module.cuda = lambda self, device=None: self
 
 demo_path = Path(__file__).resolve().parent
 root_path = demo_path
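The hunk above stubs out the CUDA entry points so that any library code probing for or requesting a GPU silently stays on the CPU. Note that the new `os.environ['CUDA_VISIBLE_DEVICES'] = '-1'` line assumes `os` is imported earlier in app.py, which is not visible in this hunk. A minimal standalone sketch of the same idea, useful for checking that the shims behave as expected (illustrative code, not part of the commit):

import torch

# Sketch: the same CPU-forcing shims, applied in isolation.
torch.cuda.is_available = lambda: False
torch.cuda.device_count = lambda: 0
torch.Tensor.cuda = lambda self, device=None: self
torch.nn.Module.cuda = lambda self, device=None: self

print(torch.cuda.is_available())      # False -- callers that probe CUDA see no GPU
print(torch.cuda.device_count())      # 0
print(torch.zeros(2).cuda().device)   # cpu -- .cuda() is now a no-op returning self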
@@ -67,6 +73,16 @@ example_previews = [
     [thumbnails[7], 'Prompt: sunglasses'],
 ]
 
+# Monkey patch any remaining CUDA calls in the models
+original_to = torch.Tensor.to
+def patched_to(self, *args, **kwargs):
+    if len(args) > 0 and isinstance(args[0], str) and args[0] == 'cuda':
+        return original_to(self, 'cpu')
+    if 'device' in kwargs and kwargs['device'] == 'cuda':
+        kwargs['device'] = 'cpu'
+    return original_to(self, *args, **kwargs)
+torch.Tensor.to = patched_to
+
 # Load models with CPU only
 models.pre_download_inpainting_models()
 inpainting_models = OrderedDict([
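The `patched_to` wrapper above rewrites `.to('cuda')` and `.to(device='cuda')` to CPU, but it compares against the exact string `'cuda'`, so a positional spec such as `'cuda:0'`, or a `torch.device` object passed positionally, would not be intercepted. A broader variant is sketched below; this is an alternative sketch, not the committed code:

import torch

_original_to = torch.Tensor.to

def _to_cpu_only(self, *args, **kwargs):
    # Rewrite any CUDA-flavoured device spec ('cuda', 'cuda:0', torch.device('cuda')) to CPU.
    def _fix(value):
        if isinstance(value, (str, torch.device)) and str(value).startswith('cuda'):
            return 'cpu'
        return value
    args = tuple(_fix(a) for a in args)
    if 'device' in kwargs:
        kwargs['device'] = _fix(kwargs['device'])
    return _original_to(self, *args, **kwargs)

torch.Tensor.to = _to_cpu_only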
@@ -74,6 +90,32 @@ inpainting_models = OrderedDict([
     ("Stable-Inpainting 2.0", 'sd2_inp'),
     ("Stable-Inpainting 1.5", 'sd15_inp')
 ])
+
+# Patch model loading to ensure CPU usage
+original_load_model = models.load_inpainting_model
+def patched_load_model(*args, **kwargs):
+    kwargs['device'] = 'cpu'
+    model = original_load_model(*args, **kwargs)
+    model.to('cpu')
+    return model
+models.load_inpainting_model = patched_load_model
+
+original_sr_load_model = models.sd2_sr.load_model
+def patched_sr_load_model(*args, **kwargs):
+    kwargs['device'] = 'cpu'
+    model = original_sr_load_model(*args, **kwargs)
+    model.to('cpu')
+    return model
+models.sd2_sr.load_model = patched_sr_load_model
+
+original_sam_load_model = models.sam.load_model
+def patched_sam_load_model(*args, **kwargs):
+    kwargs['device'] = 'cpu'
+    model = original_sam_load_model(*args, **kwargs)
+    model.to('cpu')
+    return model
+models.sam.load_model = patched_sam_load_model
+
 sr_model = models.sd2_sr.load_model(device='cpu')
 sam_predictor = models.sam.load_model(device='cpu')
 
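The three wrappers above repeat the same pattern: force `device='cpu'` and move the loaded result to CPU. An equivalent, less repetitive sketch is shown below; `force_cpu` is a hypothetical helper, it reuses the app's `models` package from the diff above, and the `hasattr` guard is added on the assumption that a loader might return an object (for example a predictor wrapper) that does not expose `.to`:

import functools

def force_cpu(load_fn):
    # Wrap a model loader so every call is pinned to the CPU.
    @functools.wraps(load_fn)
    def wrapper(*args, **kwargs):
        kwargs['device'] = 'cpu'
        model = load_fn(*args, **kwargs)
        if hasattr(model, 'to'):
            model.to('cpu')
        return model
    return wrapper

# Assumes app.py's `models` package, as referenced in the hunk above.
models.load_inpainting_model = force_cpu(models.load_inpainting_model)
models.sd2_sr.load_model = force_cpu(models.sd2_sr.load_model)
models.sam.load_model = force_cpu(models.sam.load_model)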
@@ -140,7 +182,6 @@ def inpainting_run(model_name, use_rasg, use_painta, prompt, imageMask,
     hr_image, seed, eta, negative_prompt, positive_prompt, ddim_steps,
     guidance_scale=7.5, batch_size=1, session_id=''
 ):
-    # Remove torch.cuda.empty_cache() since we're not using GPU
     set_model_from_name(model_name)
 
     method = ['default']
@@ -206,8 +247,6 @@ def upscale_run(
     if len(gallery) == 0:
         return Image.open(root_path / '__assets__/demo/sr_info.png')
 
-    # Remove torch.cuda.empty_cache() since we're not using GPU
-
     seed = int(seed)
     img_index = int(img_index)
 
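The last two hunks only drop leftover comments about `torch.cuda.empty_cache()`. As a final check that the whole pipeline really ends up on the CPU, something like the assertion helper below could be run once after model loading; this is a sketch and `assert_cpu` is a hypothetical name, not part of the commit:

import torch

def assert_cpu(module: torch.nn.Module, name: str = 'model'):
    # Fail loudly if any parameter or buffer still lives on a CUDA device.
    for tensor in list(module.parameters()) + list(module.buffers()):
        assert tensor.device.type == 'cpu', f'{name} has a tensor on {tensor.device}'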