Hugging Face Space (status: Sleeping) — commit "Update app.py"
File changed: app.py
@@ -23,10 +23,12 @@ def main():
 def translate_text(input_text, model, tokenizer):
     # Tokenize input text
     input_ids = tokenizer(input_text, return_tensors="pt").input_ids
-
-
+
+    # Generate translation
     generated_tokens = model.generate(
-    )
+        input_ids=input_ids,
+        forced_bos_token_id=tokenizer.lang_code_to_id["hi_IN"]
+    )

     # Decode translated text
     translated_text = tokenizer.decode(generated_tokens[0], skip_special_tokens=True)