Update app.py
app.py
CHANGED
@@ -133,7 +133,7 @@ if user_input:
     if torch.cuda.is_available():
        inputs = {k: v.to('cuda') for k, v in inputs.items()}
 
-    outputs = model.generate(**inputs, max_length=
+    outputs = model.generate(**inputs, max_length=5000, do_sample=True, temperature=0.7, pad_token_id=tokenizer.eos_token_id)
     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
     reply = response[len(input_text):].strip()
 
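For context, a minimal, self-contained sketch of how the updated generation call fits into an inference step. The model, tokenizer, inputs, input_text names and the generate arguments are taken from the diff above; the checkpoint name, the placeholder prompt, and the cap on max_length are illustrative assumptions, not part of this Space's actual code.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Illustrative checkpoint -- the Space's real model is not shown in this diff.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

input_text = "Hello, how are you?"  # placeholder for the user's chat message
inputs = tokenizer(input_text, return_tensors="pt")

if torch.cuda.is_available():
    model = model.to("cuda")
    inputs = {k: v.to("cuda") for k, v in inputs.items()}

# max_length counts prompt + generated tokens, so this sketch caps it at the
# model's context window (the commit itself uses a flat 5000, which assumes a
# model with a long enough context). do_sample/temperature enable stochastic
# decoding; pad_token_id=eos_token_id silences the missing-pad-token warning
# for GPT-style tokenizers that only define an EOS token.
outputs = model.generate(
    **inputs,
    max_length=min(5000, model.config.max_position_embeddings),
    do_sample=True,
    temperature=0.7,
    pad_token_id=tokenizer.eos_token_id,
)

response = tokenizer.decode(outputs[0], skip_special_tokens=True)
reply = response[len(input_text):].strip()
print(reply)

A common alternative to max_length is max_new_tokens, which bounds only the generated continuation rather than prompt plus continuation, so long user inputs cannot silently eat into the reply budget.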