Spaces:
Runtime error
Runtime error
File size: 967 Bytes
28700b7 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 |
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import torch
import librosa
import gradio as gr
# Load the Guaraní ASR model: wav2vec2 XLS-R 300M fine-tuned for Guaraní,
# downloaded from the Hugging Face Hub on first run (cached afterwards).
model_name = "ivangtorre/wav2vec2-xlsr-300m-guarani"
# Processor bundles the feature extractor (raw audio -> model inputs)
# and the tokenizer used for CTC decoding of the model's output ids.
processor = Wav2Vec2Processor.from_pretrained(model_name)
model = Wav2Vec2ForCTC.from_pretrained(model_name)
# Transcription callback for the Gradio interface.
def transcribir(audio):
    """Transcribe a Guaraní audio file to lowercase text.

    Args:
        audio: Path to the uploaded audio file, or ``None`` when the user
            submits without providing any audio (Gradio passes ``None``).

    Returns:
        The CTC-decoded transcription in lowercase, or ``""`` if no audio
        was provided.
    """
    # Guard: Gradio invokes the callback with None when no audio was given;
    # librosa.load(None) would raise and surface as an app error.
    if audio is None:
        return ""
    # Resample to 16 kHz — the sampling rate the wav2vec2 model expects.
    audio_data, _ = librosa.load(audio, sr=16000)
    inputs = processor(audio_data, sampling_rate=16000, return_tensors="pt", padding=True)
    with torch.no_grad():  # inference only; no gradients needed
        logits = model(**inputs).logits
    # Greedy CTC decoding: pick the most likely token at each time step.
    predicted_ids = torch.argmax(logits, dim=-1)
    transcription = processor.batch_decode(predicted_ids)[0]
    return transcription.lower()
# Gradio interface: single audio input (file path), text output.
demo = gr.Interface(
    fn=transcribir,
    inputs=gr.Audio(type="filepath"),
    outputs="text",
    title="Transcriptor Guaraní",
    description="Subí un audio en guaraní (.ogg, .wav) y obtené la transcripción"
)

# Fix: the interface was built but never launched, so running this script
# started no server (the Space showed "Runtime error"). launch() starts the
# Gradio web app; the __main__ guard keeps imports of this module side-effect-free.
if __name__ == "__main__":
    demo.launch()
|