ovi054 committed on
Commit
be31f5e
·
verified ·
1 Parent(s): c8f7bf3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +39 -47
app.py CHANGED
@@ -62,63 +62,55 @@ OPTIONAL_LORA_MAP = {
62
  }
63
  OPTIONAL_LORA_CHOICES = {k: v for k, v in OPTIONAL_LORA_MAP.items() if v in SUCCESSFULLY_LOADED_LORAS}
64
 
65
- # At startup, disable all adapters. They will be selectively enabled during each run.
66
- print("Disabling all LoRAs at startup. They will be activated on-demand.")
67
- pipe.disable_lora()
68
 
69
  print("Initialization complete. Gradio is starting...")
70
 
71
  @spaces.GPU()
72
  def generate(prompt, negative_prompt, width, height, num_inference_steps, optional_lora_id, progress=gr.Progress(track_tqdm=True)):
73
 
74
- # Using a try...finally block is robust for state management in apps.
75
- try:
76
- # --- Step 1: Build the list of ACTIVE adapters and their weights for THIS run ---
77
-
78
- active_adapters = []
79
- adapter_weights = []
 
 
 
80
 
81
- # Always include the base LoRA if it was loaded successfully
82
- if CAUSVID_NAME in SUCCESSFULLY_LOADED_LORAS:
83
- active_adapters.append(CAUSVID_NAME)
 
 
84
  adapter_weights.append(1.0)
85
-
86
- # If an optional LoRA is selected, add it to the list
87
- if optional_lora_id and optional_lora_id != "None":
88
- internal_name_to_add = OPTIONAL_LORA_CHOICES.get(optional_lora_id)
89
- if internal_name_to_add:
90
- active_adapters.append(internal_name_to_add)
91
- adapter_weights.append(1.0)
92
-
93
- # --- Step 2: Apply the adapters and weights for this run using the correct function ---
94
- if active_adapters:
95
- print(f"Activating adapters: {active_adapters} with weights: {adapter_weights}")
96
- # This is the correct, modern way to set adapters and their weights.
97
- pipe.set_adapters(active_adapters, adapter_weights=adapter_weights)
98
- else:
99
- print("No LoRAs are active for this run.")
100
- # ensure all are disabled if for some reason none were selected
101
- pipe.disable_lora()
102
 
103
- apply_cache_on_pipe(pipe)
104
-
105
- # --- Step 3: Run inference ---
106
- output = pipe(
107
- prompt=prompt,
108
- negative_prompt=negative_prompt,
109
- height=height,
110
- width=width,
111
- num_frames=1,
112
- num_inference_steps=num_inference_steps,
113
- guidance_scale=1.0,
114
- )
115
- image = output.frames[0][0]
116
- image = (image * 255).astype(np.uint8)
117
- return Image.fromarray(image)
 
 
 
 
 
 
 
118
 
119
- finally:
120
- print("Disabling LoRAs after run to reset state.")
121
- pipe.disable_lora()
122
 
123
  # --- Gradio Interface ---
124
  iface = gr.Interface(
 
62
  }
63
  OPTIONAL_LORA_CHOICES = {k: v for k, v in OPTIONAL_LORA_MAP.items() if v in SUCCESSFULLY_LOADED_LORAS}
64
 
65
+ # At startup, we set a known initial state. Setting no adapters is the cleanest start.
66
+ print("Setting a clean initial state (no adapters active).")
67
+ pipe.set_adapters([], adapter_weights=[])
68
 
69
  print("Initialization complete. Gradio is starting...")
70
 
71
  @spaces.GPU()
72
  def generate(prompt, negative_prompt, width, height, num_inference_steps, optional_lora_id, progress=gr.Progress(track_tqdm=True)):
73
 
74
+ # --- Step 1: ALWAYS build the desired state from scratch for THIS run ---
75
+
76
+ active_adapters = []
77
+ adapter_weights = []
78
+
79
+ # Always include the base LoRA if it was loaded successfully
80
+ if CAUSVID_NAME in SUCCESSFULLY_LOADED_LORAS:
81
+ active_adapters.append(CAUSVID_NAME)
82
+ adapter_weights.append(1.0)
83
 
84
+ # If an optional LoRA is selected, add it to the list
85
+ if optional_lora_id and optional_lora_id != "None":
86
+ internal_name_to_add = OPTIONAL_LORA_CHOICES.get(optional_lora_id)
87
+ if internal_name_to_add:
88
+ active_adapters.append(internal_name_to_add)
89
  adapter_weights.append(1.0)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
90
 
91
+ # --- Step 2: Apply the calculated state, OVERWRITING any previous state ---
92
+ # This single call is the source of truth for the run.
93
+ print(f"Setting adapters for this run: {active_adapters} with weights: {adapter_weights}")
94
+ pipe.set_adapters(active_adapters, adapter_weights=adapter_weights)
95
+
96
+ apply_cache_on_pipe(pipe)
97
+
98
+ # --- Step 3: Run inference ---
99
+ output = pipe(
100
+ prompt=prompt,
101
+ negative_prompt=negative_prompt,
102
+ height=height,
103
+ width=width,
104
+ num_frames=1,
105
+ num_inference_steps=num_inference_steps,
106
+ guidance_scale=1.0,
107
+ )
108
+ image = output.frames[0][0]
109
+ image = (image * 255).astype(np.uint8)
110
+ return Image.fromarray(image)
111
+
112
+ # --- No cleanup step is needed, as the next run will set its own state ---
113
 
 
 
 
114
 
115
  # --- Gradio Interface ---
116
  iface = gr.Interface(