LymphSteamer committed on
Commit 8ecaac5 · verified · 1 Parent(s): cd5852e

Update app.py

Files changed (1): app.py (+1 -1)
app.py CHANGED
@@ -16,7 +16,7 @@ def haiku_generate():
     inputs = tokenizer(prompt,return_tensors="pt")
     if not torch.cuda.is_available():
         inputs ={k:v.to("cpu") for k,v in inputs.items()}
-    with torch.no_glad():
+    with torch.no_grad():
         outputs = model.generate(
             **inputs,
             max_new_tokens=25,
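For context, the commit fixes a typo: torch.no_glad does not exist, and torch.no_grad is the correct context manager for disabling gradient tracking during inference. Below is a minimal sketch of how the fixed block might sit inside haiku_generate(); the checkpoint name, prompt, and decoding step are assumptions not shown in this diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed setup -- the actual checkpoint and prompt are not part of this diff.
model_name = "gpt2"  # hypothetical placeholder
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def haiku_generate():
    prompt = "Write a haiku about spring:"  # hypothetical prompt
    inputs = tokenizer(prompt, return_tensors="pt")
    if not torch.cuda.is_available():
        # Keep tensors on CPU when no GPU is available.
        inputs = {k: v.to("cpu") for k, v in inputs.items()}
    # torch.no_grad() disables gradient tracking during generation,
    # which saves memory at inference time.
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=25,
        )
    # Assumed decode step to turn token IDs back into text.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)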