Fix top-k/top-p
app.py CHANGED
@@ -183,7 +183,7 @@ def generate_diffusion_text(input_ids):
     with torch.no_grad():
         input_tensor = torch.tensor([input_ids], dtype=torch.long).to(model.device)
         logits = model(input_ids=input_tensor)["logits"]
-        logits = filter_logits(logits, top_k=
+        logits = filter_logits(logits, top_k=100, top_p=0.9)
         logits = logits.clamp(min=-1e8, max=1e4)
         probs = torch.nn.functional.softmax(logits, dim=-1)[0]
         probs = torch.clamp(probs, min=1e-8, max=1.0)
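For context, filter_logits is not part of this hunk; it is assumed to be defined elsewhere in app.py. Below is a minimal sketch of a combined top-k / top-p (nucleus) filter of the kind this call presupposes, written against plain PyTorch; the Space's actual helper may differ in name, signature, and details.

import torch

def filter_logits(logits: torch.Tensor, top_k: int = 0, top_p: float = 1.0) -> torch.Tensor:
    # Hypothetical sketch of a top-k / top-p filter; not the Space's actual code.
    logits = logits.clone()
    if top_k > 0:
        # Keep only the top_k highest logits per position; mask the rest to -inf.
        top_k = min(top_k, logits.size(-1))
        kth_value = torch.topk(logits, top_k, dim=-1).values[..., -1, None]
        logits[logits < kth_value] = float("-inf")
    if top_p < 1.0:
        # Nucleus filtering: drop tokens outside the smallest set whose
        # cumulative probability exceeds top_p.
        sorted_logits, sorted_idx = torch.sort(logits, descending=True, dim=-1)
        cum_probs = torch.softmax(sorted_logits, dim=-1).cumsum(dim=-1)
        remove = cum_probs > top_p
        # Shift right so the first token that crosses the threshold is kept.
        remove[..., 1:] = remove[..., :-1].clone()
        remove[..., 0] = False
        # Map the mask back from sorted order to the original vocabulary order.
        remove = remove.scatter(-1, sorted_idx, remove)
        logits[remove] = float("-inf")
    return logits

With top_k=100 and top_p=0.9, tokens outside the 100 highest logits or outside the 90% probability nucleus are masked to -inf; the subsequent clamp(min=-1e8, max=1e4) in the diff then turns those -inf entries into large finite negatives before the softmax, which is what keeps the resulting probabilities well defined.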