Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -1,6 +1,6 @@
 import os
-
-
+os.system("pip uninstall -y gradio")
+os.system('pip install gradio==3.43.1')

 import torch
 import torchvision
@@ -110,7 +110,6 @@ def inference(prompt, negative_prompt, guidance_scale, ddim_steps, seed):



-# @spaces.GPU()
 @torch.no_grad()
 def edit_inference(input_image, prompt, negative_prompt, guidance_scale, ddim_steps, seed, start_noise, a1, a2, a3, a4):

@@ -197,7 +196,7 @@ def edit_inference(input_image, prompt, negative_prompt, guidance_scale, ddim_st

     return (image, input_image["background"])

-
+
 def sample_then_run():
     sample_model()
     prompt = "sks person"
@@ -342,7 +341,6 @@ def invert(image, mask, pcs=10000, epochs=400, weight_decay = 1e-10, lr=1e-1):
     return network


-# @spaces.GPU(duration=200)
 def run_inversion(input_image, pcs, epochs, weight_decay,lr):
     global network
     init_image = input_image["background"].convert("RGB").resize((512, 512))
@@ -364,7 +362,6 @@ def run_inversion(input_image, pcs, epochs, weight_decay,lr):



-# @spaces.GPU()
 def file_upload(file):
     global unet
     del unet
@@ -438,10 +435,10 @@ with gr.Blocks(css="style.css") as demo:
     with gr.Column():
         with gr.Row():
             with gr.Column():
-
-
-                input_image = gr.ImageEditor(elem_id="image_upload", type='pil', label="Upload image and draw to define mask",
-                                             height=512, width=512, brush=gr.Brush(), layers=False)
+                input_image = gr.Image(source='upload', elem_id="image_upload", tool='sketch', type='pil', label="Upload image and draw to define mask",
+                                       height=512, width=512, brush_color='#00FFFF', mask_opacity=0.6)
+                # input_image = gr.ImageEditor(elem_id="image_upload", type='pil', label="Upload image and draw to define mask",
+                #     height=512, width=512, brush=gr.Brush(), layers=False)
                 with gr.Row():
                     sample = gr.Button("🎲 Sample New Model")
                     invert_button = gr.Button("⏪ Invert")
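The new lines at the top of app.py pin Gradio by shelling out to pip at startup, before the rest of the imports run. Below is a minimal sketch of a guarded variant of that pin; the ensure_gradio helper is hypothetical and not part of this Space. It only reinstalls when the detected version differs from the target, and it has to run before `import gradio`, since a module that is already imported keeps whichever version was loaded first.

import os
import importlib.metadata

def ensure_gradio(version: str = "3.43.1") -> None:
    """Reinstall Gradio only when the installed version differs from the pin (sketch only)."""
    try:
        installed = importlib.metadata.version("gradio")
    except importlib.metadata.PackageNotFoundError:
        installed = None
    if installed != version:
        # Must run before `import gradio` anywhere in the process.
        os.system("pip uninstall -y gradio")
        os.system(f"pip install gradio=={version}")

ensure_gradio()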
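The deleted comment lines referred to spaces.GPU, the decorator from Hugging Face's spaces package that ZeroGPU Spaces use to attach a GPU to a function call for a bounded duration. If the Space went back to ZeroGPU scheduling, the decorator would be re-applied roughly as sketched below; this is an illustration only, with a placeholder body standing in for the real run_inversion in app.py, and it assumes the spaces package is installed.

import spaces

@spaces.GPU(duration=200)  # ZeroGPU: attach a GPU to this call, capped per the duration argument (seconds)
def run_inversion(input_image, pcs, epochs, weight_decay, lr):
    ...  # placeholder body; the real implementation lives in app.py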
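The widget swap also changes the shape of the value the event handlers receive. In Gradio 3.43, gr.Image(tool='sketch', type='pil') passes a dict with "image" and "mask" keys, while Gradio 4's gr.ImageEditor (the commented-out variant) passes a dict with "background", "layers", and "composite" keys, which is the layout the existing input_image["background"] accesses in edit_inference and run_inversion read. A minimal normalizing helper is sketched below under those assumptions; split_image_and_mask is hypothetical and not code from this Space.

from typing import Optional, Tuple
from PIL import Image

def split_image_and_mask(payload: dict) -> Tuple[Image.Image, Optional[Image.Image]]:
    """Return (base image, drawn mask) for either widget's dict layout (sketch only)."""
    if "mask" in payload:
        # Gradio 3.x gr.Image(tool='sketch', type='pil'): {"image": ..., "mask": ...}
        return payload["image"], payload["mask"]
    if "background" in payload:
        # Gradio 4 gr.ImageEditor: {"background": ..., "layers": [...], "composite": ...}
        layers = payload.get("layers") or []
        return payload["background"], (layers[0] if layers else None)
    raise ValueError("unrecognized image payload")

With a shim like this, a handler could read the uploaded image and mask the same way under either Gradio version instead of hard-coding one dict layout.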