update cap
app.py
CHANGED
@@ -241,7 +241,7 @@ def gradio_ask(user_message, chatbot, chat_state, radio):
     # if len(user_message) == 0:
     #     return gr.update(interactive=True, placeholder='Input should not be empty!'), chatbot, chat_state
 
-    chat.ask(user_message, chat_state, radio
+    chat.ask(user_message, chat_state, radio)
     chatbot = chatbot + [[user_message, None]]
     return chatbot, chat_state
 
@@ -250,13 +250,13 @@ def generate_ans(user_message, chatbot, chat_state, img_list, radio, text, num_b
     # if len(user_message) == 0:
     #     return gr.update(interactive=True, placeholder='Input should not be empty!'), chatbot, chat_state
 
-    chat.ask(user_message, chat_state, radio
+    chat.ask(user_message, chat_state, radio)
     chatbot = chatbot + [[user_message, None]]
     # return chatbot, chat_state
     image = None
     llm_message, image = \
         chat.answer(conv=chat_state, img_list=img_list, max_new_tokens=300, num_beams=1, temperature=temperature,
-                    max_length=2000, radio=radio, text_input=text
+                    max_length=2000, radio=radio, text_input=text)
 
     chatbot[-1][1] = llm_message
     if chat_state[-1]["from"] == "gpt":
@@ -273,7 +273,7 @@ def gradio_answer(chatbot, chat_state, img_list, radio, text, num_beams, tempera
     image = None
     llm_message, image = \
         chat.answer(conv=chat_state, img_list=img_list, max_new_tokens=300, num_beams=1, temperature=temperature,
-                    max_length=2000, radio=radio, text_input=text
+                    max_length=2000, radio=radio, text_input=text)
 
     chatbot[-1][1] = llm_message
     if chat_state[-1]["from"] == "gpt":
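Taken together, the three hunks close the previously unbalanced calls to chat.ask(...) and chat.answer(...). For orientation only, a minimal sketch of what the patched handlers read like after this commit; the chat object, the surrounding Gradio wiring, and everything past the visible hunks (the chat_state[-1]["from"] == "gpt" branch and the return values of the answer handlers) are assumptions, not the Space's actual code.

# `chat` is assumed to be an already-constructed model wrapper exposing
# .ask(...) and .answer(...) with the argument names visible in the diff.

def gradio_ask(user_message, chatbot, chat_state, radio):
    # Record the user turn in the conversation state (call now closed correctly).
    chat.ask(user_message, chat_state, radio)
    chatbot = chatbot + [[user_message, None]]
    return chatbot, chat_state

def gradio_answer(chatbot, chat_state, img_list, radio, text, num_beams, temperature):
    image = None
    # Generate the model reply; the call now closes after text_input=text.
    llm_message, image = \
        chat.answer(conv=chat_state, img_list=img_list, max_new_tokens=300, num_beams=1,
                    temperature=temperature, max_length=2000, radio=radio, text_input=text)
    chatbot[-1][1] = llm_message
    # The rest of the handler (the chat_state[-1]["from"] == "gpt" branch and the
    # return value) lies outside the visible hunks and is omitted here.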