inoculatemedia committed on
Commit d774138 · verified · 1 Parent(s): e54e820

Update app.py

Files changed (1)
  1. app.py +2 -16
app.py CHANGED
@@ -5,14 +5,6 @@ import random
 import spaces
 import torch
 from diffusers import SanaSprintPipeline
-from mcp.server.fastmcp import FastMCP
-from gradio_client import Client
-import sys
-import io
-import json
-
-mcp = FastMCP("gradio-spaces")
-
 
 dtype = torch.bfloat16
 device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -31,7 +23,6 @@ MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
 @spaces.GPU(duration=5)
-@mcp.tool()
 def infer(prompt, model_size, seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=4.5, num_inference_steps=2, progress=gr.Progress(track_tqdm=True)):
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
@@ -154,10 +145,5 @@ with gr.Blocks(css=css) as demo:
        inputs = [prompt, model_size, seed, randomize_seed, width, height, guidance_scale, num_inference_steps], # Add model_size to inputs
        outputs = [result, seed]
    )
-if __name__ == "__main__":
-    import sys
-    import io
-    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
-
-    mcp.run(transport='stdio')
-    demo.launch(share=True)
+
+demo.launch(share=True)
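
For context, the deleted lines followed the stdio FastMCP pattern sketched below. This is a minimal, hypothetical reconstruction based only on the removed imports and calls; the echo tool is a placeholder standing in for the Space's infer() function, not code from the repository.

# Minimal sketch of the stdio-based FastMCP wiring this commit removes.
# The echo tool is a hypothetical placeholder; the old app.py decorated infer() instead.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("gradio-spaces")

@mcp.tool()
def echo(text: str) -> str:
    """Placeholder tool: returns its input unchanged."""
    return text

if __name__ == "__main__":
    # run(transport="stdio") blocks to serve MCP requests over stdin/stdout,
    # so the demo.launch(share=True) that followed it in the old app.py was
    # only reached after the MCP server exited.
    mcp.run(transport="stdio")

With the MCP wiring gone, the Space is a plain Gradio app that simply calls demo.launch(share=True) at module level, as the added lines in the last hunk show.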
 