import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import torch

# Load fine-tuned Assamese ↔ English model
MODEL_NAME = "Sazid2/assamese-english-translator"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

# Translation function
def translate_text(text):
    if not text.strip():
        return "⚠️ Please enter some text."
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model.generate(**inputs, max_length=128)
    translation = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return translation

# Gradio UI
demo = gr.Interface(
    fn=translate_text,
    inputs=gr.Textbox(label="Enter Assamese or English text", placeholder="Type here..."),
    outputs=gr.Textbox(label="Translation"),
    title="🇮🇳 Assamese ↔ English Translator",
    description="Fine-tuned model for Assamese ↔ English translation using Helsinki-NLP/opus-mt-mul-en base.",
    theme="soft",
)

if __name__ == "__main__":
    demo.launch()
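
# Optional quick sanity check (a sketch, not part of the Space UI): assumes this
# file is saved as app.py and the model above downloads successfully; the sample
# sentence is illustrative only.
#   >>> from app import translate_text
#   >>> translate_text("Hello, how are you today?")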