# 1. Install necessary libraries (if you haven't already)
# pip install gradio transformers torch sentencepiece

# 2. Import libraries
import gradio as gr
from transformers import pipeline
import torch  # PyTorch is needed as a backend for transformers

# 3. Load the translation pipeline
# Using the NLLB model which supports many languages including English and Telugu
# You might need to adjust device mapping based on your hardware (e.g., device=0 for GPU)
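# A small sketch (not in the original code): torch.cuda.is_available() can be used to
# pick the device automatically instead of hard-coding device=-1 below.
# pipeline_device = 0 if torch.cuda.is_available() else -1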
try:
    # Try loading the specific model mentioned implicitly by the language codes
    translator = pipeline('translation', model='facebook/nllb-200-distilled-600M', device=-1)  # Use -1 for CPU
    print("Translator pipeline loaded successfully.")
except Exception as e:
    print(f"Error loading translator pipeline: {e}")
    # Capture the message now: Python clears the exception variable after the except
    # block, so the fallback function below cannot reference `e` when called later.
    load_error = f"Error: Could not load translation model. {e}"

    # Define a dummy function if the pipeline fails to load, so the Gradio interface still runs
    def translator(text, src_lang, tgt_lang):
        return load_error

# 4. Define the translation function for Gradio
def translate_text(text_to_translate, source_language, target_language):
    """
    Translates text using the loaded Hugging Face pipeline.

    Args:
        text_to_translate (str): The text to translate.
        source_language (str): The source language code (e.g., 'eng_Latn').
        target_language (str): The target language code (e.g., 'tel_Telu').

    Returns:
        str: The translated text or an error message.
    """
    if not text_to_translate:
        return "Please enter text to translate."
    if not source_language or not target_language:
        return "Please select both source and target languages."

    try:
        # Perform the translation using the pipeline
        # Note: the pipeline expects the keyword arguments src_lang and tgt_lang
        translated_output = translator(text_to_translate,
                                       src_lang=source_language,
                                       tgt_lang=target_language)
        # The output is usually a list containing a dictionary
        if translated_output and isinstance(translated_output, list):
            return translated_output[0]['translation_text']
        else:
            # Handle unexpected output format
            return f"Translation failed. Unexpected output: {translated_output}"
    except Exception as e:
        print(f"Translation error: {e}")
        # Provide a user-friendly error message
        return f"An error occurred during translation: {e}. Make sure the language codes are correct and supported by the model."

# 5. Define language choices for dropdowns (using NLLB codes)
# Add more languages as needed from the NLLB supported list
language_choices = [
    ("English", "eng_Latn"),
    ("Telugu", "tel_Telu"),
    ("Hindi", "hin_Deva"),
    ("Tamil", "tam_Taml"),
    ("Spanish", "spa_Latn"),
    ("French", "fra_Latn"),
    ("German", "deu_Latn"),
    ("Chinese (Simplified)", "zho_Hans"),
]

# 6. Create the Gradio interface
with gr.Blocks(theme=gr.themes.Soft()) as iface:
    gr.Markdown("# Text Translator using NLLB Model")
    gr.Markdown("Enter text and select the source and target languages.")

    with gr.Row():
        # Input text area
        input_text = gr.Textbox(label="Text to Translate", placeholder="Enter text here...", lines=5)
        # Output text area
        output_text = gr.Textbox(label="Translated Text", placeholder="Translation will appear here...", lines=5, interactive=False)

    with gr.Row():
        # Source language dropdown
        source_lang = gr.Dropdown(
            label="Source Language",
            choices=language_choices,
            value="eng_Latn"  # Default to English
        )
        # Target language dropdown
        target_lang = gr.Dropdown(
            label="Target Language",
            choices=language_choices,
            value="tel_Telu"  # Default to Telugu
        )

    # Translate button
    translate_button = gr.Button("Translate", variant="primary")

    # Define the action when the button is clicked
    translate_button.click(
        fn=translate_text,
        inputs=[input_text, source_lang, target_lang],
        outputs=output_text,
        api_name="translate"  # Name for the API endpoint if needed
    )

    gr.Markdown("Powered by Hugging Face Transformers and Gradio.")

# 7. Launch the Gradio app
# When running locally, this will provide a URL.
# On Hugging Face Spaces, this line makes the app run.
if __name__ == "__main__":
    iface.launch()
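
# 8. (Optional) Calling the app programmatically -- a minimal sketch, assuming the app is
# running locally on Gradio's default port and that gradio_client is installed
# (pip install gradio-client). The api_name matches the one set on the button click above.
#
# from gradio_client import Client
#
# client = Client("http://127.0.0.1:7860/")
# result = client.predict(
#     "Hello, how are you?",  # text_to_translate
#     "eng_Latn",             # source_language
#     "tel_Telu",             # target_language
#     api_name="/translate",
# )
# print(result)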