Update app.py
app.py CHANGED
@@ -17,13 +17,13 @@ def haiku_generate():
     if not torch.cuda.is_available():
         inputs = {k: v.to("cpu") for k, v in inputs.items()}
     with torch.no_grad():
-        outputs = model.generate
+        outputs = model.generate(
             **inputs,
             max_new_tokens=25,
             do_sample=True,
             temperature=0.7,
             top_p=0.9
-
+        )
     return tokenizer.decode(outputs[0], skip_special_tokens=True)
 
 textbox = gr.Textbox
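For context, the patched call sits inside haiku_generate roughly as in the sketch below. This is a minimal, self-contained approximation: the model choice ("gpt2"), the prompt parameter, and the Gradio Interface wiring are assumptions for illustration, since only lines 17-29 of app.py appear in the diff above.

# Minimal sketch; model choice ("gpt2"), the prompt argument, and the Gradio
# wiring are illustrative assumptions, not taken from the Space's actual app.py.
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

def haiku_generate(prompt):
    # Tokenize the prompt and keep tensors on CPU when no GPU is available.
    inputs = tokenizer(prompt, return_tensors="pt")
    if not torch.cuda.is_available():
        inputs = {k: v.to("cpu") for k, v in inputs.items()}
    with torch.no_grad():
        # The patch adds the parentheses so generate() is actually called.
        outputs = model.generate(
            **inputs,
            max_new_tokens=25,
            do_sample=True,
            temperature=0.7,
            top_p=0.9
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

textbox = gr.Textbox(label="Haiku prompt")
demo = gr.Interface(fn=haiku_generate, inputs=textbox, outputs="text")

if __name__ == "__main__":
    demo.launch()

Before this commit, the bare model.generate (without the opening parenthesis) left **inputs, on the following line as a standalone statement, which Python rejects as a SyntaxError; the parentheses added on lines 20 and 26 turn the whole block into a single generate(...) call.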