| import gradio as gr |
| from huggingface_hub import InferenceClient |
|
|
| from transformers import pipeline |
|
|
# Local text-generation pipeline that backs the chat UI below.
# NOTE(review): trust_remote_code=True executes Python shipped inside the
# "Mochiva-team/Mochiva-model" repo — confirm the repo is trusted before deploying.
pipe = pipeline("text-generation", model="Mochiva-team/Mochiva-model", trust_remote_code=True)
def woo(message, history, system_message, max_tokens, temperature, top_p):
    """Chat handler for ``gr.ChatInterface``: generate a reply with ``pipe``.

    The parameter order matches ChatInterface's call convention: the new user
    message, the conversation history, then the four ``additional_inputs``
    widgets declared below (system prompt, max new tokens, temperature, top-p).
    The original stub took no arguments, so every chat turn raised a TypeError.

    Returns:
        str: the assistant's reply text.
    """
    # Rebuild the conversation as a chat-template message list.
    messages = [{"role": "system", "content": system_message}]
    for turn in history:
        if isinstance(turn, dict):
            # Gradio "messages" history format: {"role": ..., "content": ...}.
            messages.append({"role": turn["role"], "content": turn["content"]})
        else:
            # Tuple history format: (user_text, assistant_text); either may be None.
            user_text, assistant_text = turn
            if user_text:
                messages.append({"role": "user", "content": user_text})
            if assistant_text:
                messages.append({"role": "assistant", "content": assistant_text})
    messages.append({"role": "user", "content": message})

    outputs = pipe(
        messages,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        do_sample=True,
    )
    # Chat-style pipelines return the extended message list under
    # "generated_text"; the last entry is the new assistant turn.
    # TODO(review): confirm against this model's actual pipeline output shape.
    return outputs[0]["generated_text"][-1]["content"]
|
|
| """ |
| For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface |
| """ |
| chatbot = gr.ChatInterface( |
| woo, |
| additional_inputs=[ |
| gr.Textbox(value="You are a friendly Chatbot.", label="System message"), |
| gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"), |
| gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"), |
| gr.Slider( |
| minimum=0.1, |
| maximum=1.0, |
| value=0.95, |
| step=0.05, |
| label="Top-p (nucleus sampling)", |
| ), |
| ], |
| ) |
|
|
# Top-level app layout: a sidebar with the Hugging Face login button, plus the
# chat interface rendered into the main area.
with gr.Blocks() as demo:
    with gr.Sidebar():
        gr.LoginButton()
    chatbot.render()
|
|
|
|
| if __name__ == "__main__": |
| demo.launch() |
|
|