import gradio as gr
import requests
import os

# Hugging Face API URL and token for the model
API_URL = "https://api-inference.huggingface.co/models/google/bigbird-pegasus-large-pubmed"
HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")

# Define a function to send user input to the model
def get_bot_response(user_input):
    headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
    response = requests.post(API_URL, headers=headers, json={"inputs": user_input})

    # Debugging: print status and response
    print("Status Code:", response.status_code)
    print("Response:", response.text)

    if response.status_code == 200:
        result = response.json()
        # BigBird-Pegasus is a summarization checkpoint, so the Inference API
        # typically returns "summary_text"; keep "generated_text" as a fallback key.
        bot_response = result[0].get("summary_text") or result[0].get(
            "generated_text", "Sorry, I couldn't generate a response."
        )
    else:
        bot_response = "Sorry, the model is currently unavailable."
    return bot_response

# Set up Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Medical Consultation Chatbot")
    user_input = gr.Textbox(label="Enter your question:")
    output = gr.Textbox(label="Bot Response")
    # On submit, call the get_bot_response function
    user_input.submit(get_bot_response, user_input, output)

demo.launch()
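
# get_bot_response() above gives up after a single failed request. Below is a
# minimal sketch (not part of the original app) of a more forgiving variant.
# It assumes the Inference API answers with HTTP 503 while the model is still
# loading (cold start); the retry count and delay are arbitrary illustration
# values, and the function is not wired into the Gradio interface.
import time

def get_bot_response_with_retry(user_input, retries=3, delay=10):
    headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
    for _ in range(retries):
        response = requests.post(API_URL, headers=headers, json={"inputs": user_input})
        if response.status_code == 200:
            result = response.json()
            return result[0].get("summary_text") or result[0].get(
                "generated_text", "Sorry, I couldn't generate a response."
            )
        if response.status_code == 503:
            # Model is likely still loading on the server; wait, then retry.
            time.sleep(delay)
            continue
        break
    return "Sorry, the model is currently unavailable."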