Soh19 committed
Commit b0a8791 · verified · 1 Parent(s): 8ef92c3

Update README.md

Files changed (1)
  1. README.md +1 -1
README.md CHANGED
@@ -119,7 +119,7 @@ inputs = tokenizer(text, return_tensors="pt")
 outputs = model.generate(**inputs.to("cuda"), max_new_tokens=185,do_sample=False,top_k=None,temperature=1.0,top_p=None)
 
 # Decode the output
-result = tokenizer.decode(outputs[0], skip_special_tokens=True)
+result = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
 print(result)
 ```
 
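
For context on why the changed line helps: `model.generate` returns the prompt tokens followed by the newly generated tokens, so decoding `outputs[0]` directly echoes the prompt back in the result. Slicing off the first `inputs["input_ids"].shape[-1]` tokens keeps only the generated continuation. The sketch below illustrates the difference with a small placeholder checkpoint (`gpt2`), not the model this README documents.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder checkpoint for illustration only; substitute the actual model.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

inputs = tokenizer("The capital of France is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=10, do_sample=False)

# Old behavior: decodes prompt + completion together, so the prompt is repeated.
with_prompt = tokenizer.decode(outputs[0], skip_special_tokens=True)

# New behavior: skip the prompt tokens and decode only the generated part.
prompt_len = inputs["input_ids"].shape[-1]
completion_only = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)

print(with_prompt)
print(completion_only)
```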