Update README.md
README.md
CHANGED
@@ -119,7 +119,7 @@ inputs = tokenizer(text, return_tensors="pt")
 outputs = model.generate(**inputs.to("cuda"), max_new_tokens=185,do_sample=False,top_k=None,temperature=1.0,top_p=None)
 
 # Decode the output
-result = tokenizer.decode(outputs[0], skip_special_tokens=True)
+result = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
 print(result)
 ```
 
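The change decodes only the newly generated tokens: with a decoder-only model, `generate` returns the prompt tokens followed by the completion, so slicing `outputs[0]` from `inputs["input_ids"].shape[-1]` onward drops the echoed prompt before `tokenizer.decode` runs. A minimal sketch of that slicing with made-up token IDs (no model required; assumes PyTorch is installed):

```python
import torch

# Dummy token IDs standing in for the tokenized prompt and the generate() output;
# the values are arbitrary and only illustrate the slicing.
prompt_ids = torch.tensor([[101, 2023, 2003, 1037, 3231]])      # shape (1, 5)
outputs = torch.tensor([[101, 2023, 2003, 1037, 3231,           # prompt echoed back...
                         2047, 19204, 2015, 102]])               # ...plus generated tokens

# Equivalent of outputs[0][inputs["input_ids"].shape[-1]:] in the README:
# keep only the tokens that come after the prompt.
completion_ids = outputs[0][prompt_ids.shape[-1]:]
print(completion_ids)  # tensor([2047, 19204, 2015, 102])
```

Passing the sliced IDs to `tokenizer.decode` then yields just the model's answer rather than the prompt plus the answer.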