Spaces:
Sleeping
| import gradio as gr | |
| from transformers import MBartForConditionalGeneration, MBart50TokenizerFast | |
# Load the multilingual mBART-50 many-to-many checkpoint once at module import.
# The tokenizer is configured for English source text and Thai target text.
MODEL_NAME = "facebook/mbart-large-50-many-to-many-mmt"
model = MBartForConditionalGeneration.from_pretrained(MODEL_NAME)
tokenizer = MBart50TokenizerFast.from_pretrained(
    MODEL_NAME, src_lang="en_XX", tgt_lang="th_TH"
)
model_name = MODEL_NAME  # keep the original module-level name for any external readers
# Prediction function
def translate_text(input_text):
    """Translate English text to Thai with mBART-50 many-to-many.

    Args:
        input_text: Source sentence in English (the tokenizer was created
            with src_lang="en_XX").

    Returns:
        The decoded Thai translation as a plain string.
    """
    # tokenizer(...) (rather than bare .encode) also builds the attention
    # mask, which generate() needs to ignore padding correctly.
    inputs = tokenizer(input_text, return_tensors="pt")
    outputs = model.generate(
        **inputs,
        # BUG FIX: mbart-50 many-to-many requires the target-language token
        # to be forced as the first generated token; without this the model
        # may decode into the wrong language regardless of tgt_lang above.
        forced_bos_token_id=tokenizer.lang_code_to_id["th_TH"],
        max_new_tokens=40,
        # Sampling parameters kept from the original demo; note that
        # deterministic beam search is more typical for translation quality.
        do_sample=True,
        top_k=30,
        top_p=0.95,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Gradio interface: a single text box in, translated text out.
interface = gr.Interface(
    fn=translate_text,
    inputs="text",
    outputs="text",
    title="Language Translation",
    # BUG FIX: the description previously referenced
    # "my_awesome_opus_books_model", but this app loads
    # facebook/mbart-large-50-many-to-many-mmt.
    description="Translate English text to Thai using mBART-50 (many-to-many).",
)

# Launch the Gradio app (blocking call; serves the web UI).
if __name__ == "__main__":
    interface.launch()