Update app.py
app.py CHANGED
@@ -60,8 +60,8 @@ def load_pipeline():
     torch_dtype = torch.float16
 
     # torch_dtype = torch.float16
-    pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype)
-    pipe.vae = AutoencoderKL.from_pretrained(vae_model_id, subfolder=vae_folder, torch_dtype=torch_dtype)
+    pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype)
+    pipe.vae = AutoencoderKL.from_pretrained(vae_model_id, subfolder=vae_folder, torch_dtype=torch_dtype)
     pipe.load_lora_weights(
         hf_hub_download(repo_id="jiaxiangc/res-adapter", subfolder=resadapter_model_name, filename="pytorch_lora_weights.safetensors"),
         adapter_name="res_adapter",
@@ -118,6 +118,7 @@ def resize_image_with_aspect(image, res_range_min=128, res_range_max=1024):
 def reconstruct(input_img, caption):
 
     pipe, inverse_scheduler, scheduler = load_pipeline()
+    pipe.to("cuda")
 
     global weights
     weights = {}
@@ -346,7 +347,8 @@ def replace_attention_processor(unet, clear=False, blur_sigma=None):
 @spaces.GPU()
 def apply_prompt(meta_data, new_prompt):
 
-    pipe,
+    pipe, _, scheduler = load_pipeline()
+    pipe.to("cuda")
 
     caption, real_image_initial_latents, inversed_latents, _ = meta_data
     negative_prompt = ""