| import gradio as gr |
| import torch |
| import spaces |
| from transformers import AutoModelForSeq2SeqLM, AutoTokenizer |
| from peft import PeftModel |
|
|
| |
# Hugging Face Hub id of the base seq2seq model that all adapters below were
# fine-tuned on (Aya-101, a 13B mT5-style multilingual model).
BASE_MODEL_ID = "CohereForAI/aya-101"


# Maps the translation-direction label shown in the UI dropdown to the Hub
# repo id of the PEFT/LoRA adapter trained for that direction. The dropdown
# choices and the prompt prefix in translate() are both derived from these keys.
MODEL_MAP = {
    "English to Angika": "snjev310/aya-101-english-angika",
    "Hindi to Angika": "snjev310/aya-101-hindi-angika",
    "Angika to English": "snjev310/aya-101-angika-english"
}


# The tokenizer is shared by every adapter (they all sit on the same base
# model), so it is loaded once at import time; downloads from the Hub on
# first run.
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL_ID)
|
|
@spaces.GPU(duration=120)
def translate(text, model_choice):
    """Translate *text* with the LoRA adapter selected by *model_choice*.

    Args:
        text: Source-language text entered by the user.
        model_choice: A key of ``MODEL_MAP`` (e.g. ``"English to Angika"``).

    Returns:
        The translated string, or a human-readable message for empty input
        or an unknown translation mode.
    """
    if not text.strip():
        return "Please enter text to translate."

    # BUGFIX: a .get() lookup instead of MODEL_MAP[model_choice] — an
    # unexpected mode now yields a message instead of an uncaught KeyError.
    adapter_id = MODEL_MAP.get(model_choice)
    if adapter_id is None:
        return "Please enter text to translate."  # unreachable via the UI dropdown
    # NOTE(review): the line above intentionally mirrors the polite-error
    # style; if a distinct message is preferred, change it here only.

    # The 13B base model is (re)loaded per request so the weights only live
    # for the duration of the ZeroGPU allocation granted by @spaces.GPU.
    base_model = AutoModelForSeq2SeqLM.from_pretrained(
        BASE_MODEL_ID,
        torch_dtype=torch.bfloat16,
        low_cpu_mem_usage=True,
        device_map="auto"
    )

    model = PeftModel.from_pretrained(base_model, adapter_id)
    model.eval()

    try:
        # Task-prefix prompt matching how the adapters were fine-tuned; the
        # dropdown label doubles as the direction tag.
        prompt = f"translate {model_choice}: {text}"
        inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

        with torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_new_tokens=256,
                do_sample=True,
                temperature=0.3,
                top_p=0.9
            )

        result = tokenizer.decode(outputs[0], skip_special_tokens=True)
    finally:
        # BUGFIX: cleanup now runs even when tokenization/generation raises,
        # so a failed request no longer leaks the full model in GPU memory.
        del model
        del base_model
        if torch.cuda.is_available():
            torch.cuda.empty_cache()

    return result
|
|
| |
| |
# BUGFIX: the theme must be given to the gr.Blocks constructor —
# Blocks.launch() does not accept a ``theme`` keyword argument.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🗣️ Angika Machine Translation")
    gr.Markdown("Select your translation direction below. This uses the Aya-101 13B model.")

    with gr.Row():
        with gr.Column():
            model_dropdown = gr.Dropdown(
                choices=list(MODEL_MAP.keys()),
                value="English to Angika",
                label="Select Translation Mode"
            )
            input_text = gr.Textbox(label="Input Text", placeholder="Type here...", lines=5)
            submit_btn = gr.Button("Translate", variant="primary")

        with gr.Column():
            output_text = gr.Textbox(label="Translated Text", lines=5, interactive=False)

    # Main action: run the GPU-decorated translate() on the entered text.
    submit_btn.click(
        fn=translate,
        inputs=[input_text, model_dropdown],
        outputs=output_text
    )

    gr.Examples(
        examples=[
            ["How are you doing today?", "English to Angika"],
            ["आप कैसे हैं?", "Hindi to Angika"],
        ],
        inputs=[input_text, model_dropdown]
    )

    gr.Markdown("---")
    with gr.Row():
        with gr.Column():
            gr.Markdown("### 🛠️ Help us improve!")
            gr.Markdown("If the translation isn't perfect, please share the correct version with us.")
            feedback_btn = gr.Button("Submit Correct Translation", variant="secondary")

            # fn=None with a js callback runs purely client-side (opens the
            # feedback form in a new tab without a server round-trip).
            feedback_btn.click(
                fn=None,
                js='() => { window.open("https://forms.gle/FXspX7DxXHh5En6c7", "_blank"); }'
            )

        with gr.Column():
            gr.Markdown("### ⚡ Support our computation")
            gr.Markdown("Your support helps us cover the hosting costs for this 13B model.")
            support_btn = gr.Button("Support Computation Costs ☕, UPI Id: AngikaAi@ybl ", variant="primary")

            support_btn.click(
                fn=None,
                js='() => { window.open("https://drive.google.com/file/d/1XIpXI25WzNingr4iDoNnt595kxK-YtCq/view?usp=sharing", "_blank"); }'
            )

    gr.Markdown("---")
    # Raw HTML footer; sanitize_html=False keeps the <div>/<a> markup intact.
    gr.Markdown(
        """
<div style="text-align: center;">
<p>📧 For any comments, suggestions, or help, please write to us at: <b>consultangika@gmail.com</b></p>
<p>🐦 Follow our X (Twitter) handle: <a href="https://x.com/AngikaAi" target="_blank">@AngikaAi</a></p>
</div>
""",
        sanitize_html=False
    )


demo.launch()