Manjushri committed
Commit 893441b · verified · 1 Parent(s): f606112

Update app.py

Files changed (1): app.py (+10 -2)
app.py CHANGED
@@ -3,7 +3,7 @@ import torch
 import numpy as np
 import modin.pandas as pd
 from PIL import Image
-from diffusers import DiffusionPipeline #, StableDiffusion3Pipeline
+from diffusers import DiffusionPipeline, CogView4Pipeline #, StableDiffusion3Pipeline
 from huggingface_hub import hf_hub_download
 
 device = 'cuda' if torch.cuda.is_available() else 'cpu'
@@ -33,10 +33,18 @@ def genie (Model, Prompt, negative_prompt, height, width, scale, steps, seed):
     torch.cuda.empty_cache()
     return image
 
+    if Model == "Club":
+        pipe = CogView4Pipeline.from_pretrained("THUDM/CogView4-6B", torch_dtype=torch.bfloat16)
+        # Enable CPU offload and VAE slicing/tiling to reduce GPU memory usage
+        pipe.enable_model_cpu_offload()
+        pipe.vae.enable_slicing()
+        pipe.vae.enable_tiling()
+        image = pipe(prompt=Prompt, guidance_scale=scale, num_images_per_prompt=1, num_inference_steps=steps, width=1024, height=1024).images[0]
+        return image
 
     return image
 
-gr.Interface(fn=genie, inputs=[gr.Radio(['PhotoReal', 'Animagine XL 4',], value='PhotoReal', label='Choose Model'),
+gr.Interface(fn=genie, inputs=[gr.Radio(['PhotoReal', 'Animagine XL 4', "Club"], value='PhotoReal', label='Choose Model'),
     gr.Textbox(label='What you want the AI to generate. 77 Token Limit.'),
     gr.Textbox(label='What you Do Not want the AI to generate. 77 Token Limit'),
     gr.Slider(512, 1024, 768, step=128, label='Height'),
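
For reference, a minimal standalone sketch of the new CogView4 path added in this commit, assuming the same model ID and the memory-saving helpers the diff enables (enable_model_cpu_offload plus VAE slicing and tiling). The prompt, guidance scale, and step count below are placeholder values for illustration, not the Space's slider defaults.

import torch
from diffusers import CogView4Pipeline

# Load the same checkpoint the commit uses; bfloat16 keeps memory in check.
pipe = CogView4Pipeline.from_pretrained("THUDM/CogView4-6B", torch_dtype=torch.bfloat16)

# Memory-saving options matching the commit: offload idle sub-models to CPU
# and run the VAE in slices/tiles instead of one large pass.
pipe.enable_model_cpu_offload()
pipe.vae.enable_slicing()
pipe.vae.enable_tiling()

# Placeholder prompt and settings (the Space feeds these from its Gradio inputs).
image = pipe(
    prompt="A lighthouse on a cliff at dawn, watercolor style",
    guidance_scale=3.5,
    num_inference_steps=50,
    num_images_per_prompt=1,
    width=1024,
    height=1024,
).images[0]
image.save("cogview4_sample.png")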