Frenchizer committed on
Commit
f16b34f
·
verified ·
1 Parent(s): a7c166d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -8,6 +8,10 @@ MODEL_FILE = "./model.onnx"
8
  session = ort.InferenceSession(MODEL_FILE)
9
  tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-fr")
10
 
 
 
 
 
11
  # Gradio prediction function
12
  def gradio_predict(input_text):
13
  # Tokenize input text
@@ -41,10 +45,6 @@ def gradio_predict(input_text):
41
  translated_text = tokenizer.decode(outputs[0][0], skip_special_tokens=True)
42
  return translated_text
43
 
44
- # Add this after loading the model
45
- print("Model inputs:", [input.name for input in session.get_inputs()])
46
- print("Input shapes:", [input.shape for input in session.get_inputs()])
47
-
48
  except Exception as e:
49
  print(f"Error during inference: {e}")
50
  return "An error occurred during inference."
@@ -55,4 +55,4 @@ gr.Interface(
55
  inputs="text",
56
  outputs="text",
57
  live=True
58
- ).launch(share=True)
 
8
  session = ort.InferenceSession(MODEL_FILE)
9
  tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-fr")
10
 
11
+ # Add this after loading the model
12
+ print("Model inputs:", [input.name for input in session.get_inputs()])
13
+ print("Input shapes:", [input.shape for input in session.get_inputs()])
14
+
15
  # Gradio prediction function
16
  def gradio_predict(input_text):
17
  # Tokenize input text
 
45
  translated_text = tokenizer.decode(outputs[0][0], skip_special_tokens=True)
46
  return translated_text
47
 
 
 
 
 
48
  except Exception as e:
49
  print(f"Error during inference: {e}")
50
  return "An error occurred during inference."
 
55
  inputs="text",
56
  outputs="text",
57
  live=True
58
+ ).launch()