Commit 68aad16
Parent(s): 4c8df35

add section titles and rescale buttons
app.py CHANGED
@@ -208,17 +208,18 @@ def demo():
 
             gr.Markdown(
                 """
-                <center><
-                <h3>Ask any questions about your PDF documents</h3>
-                """
-            )
-            gr.Markdown(
-                """<b>Note:</b> This AI assistant, using Langchain and open-source LLMs, performs retrieval-augmented generation (RAG) from your PDF documents. \
-                This chatbot takes past questions into account when generating answers (via conversational memory), and includes document references for clarity purposes.<br>
+                <center><h1>Chat with your PDF!</center></h1>
+                <center><h3>Ask any questions about your PDF documents</h3><center>
                 """
             )
+            # gr.Markdown(
+            #     """<b>Note:</b> This AI assistant, using Langchain and open-source LLMs, performs retrieval-augmented generation (RAG) from your PDF documents. \
+            #     This chatbot takes past questions into account when generating answers (via conversational memory), and includes document references for clarity purposes.<br>
+            #     """
+            # )
 
             with gr.Tab("Chatbot configuration"):
+                gr.Markdown("1. Upload the PDF(s)")
                 with gr.Row():
                     document = gr.Files(
                         height=100,
@@ -229,6 +230,7 @@ def demo():
                     )
                     # upload_btn = gr.UploadButton("Loading document...", height=100, file_count="multiple", file_types=["pdf"], scale=1)
 
+                gr.Markdown("2. Configure the vector database")
                 with gr.Row():
                     with gr.Row():
                         db_btn = gr.Radio(
@@ -261,13 +263,14 @@ def demo():
                             info="Chunk overlap",
                             interactive=True,
                         )
-
-
+                with gr.Row():
+                    db_btn = gr.Button("Generate vector database", size="sm")
                 with gr.Row():
                     db_progress = gr.Textbox(
                         label="Vector database initialization", value="0% Configure the DB"
                     )
 
+                gr.Markdown("3. Configure the LLM model")
                 with gr.Row():
                     with gr.Row():
                         llm_btn = gr.Radio(
@@ -308,11 +311,13 @@ def demo():
                             info="Model top-k samples",
                             interactive=True,
                         )
-
-
+                with gr.Row():
+                    qachain_btn = gr.Button(
+                        "Initialize Question Answering chain", size="sm"
+                    )
                 with gr.Row():
                     llm_progress = gr.Textbox(
-                        value="
+                        value="QA chain initialization", label="0% Configure the QA chain"
                     )
 
             with gr.Tab("Chatbot"):
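As a minimal sketch (not part of this commit), the two new buttons would typically be wired to callbacks inside the same gr.Blocks context via Gradio's .click() event. The handler names initialize_database and initialize_LLM, and the component names slider_chunk_size, slider_chunk_overlap, slider_temperature, slider_maxtokens, slider_topk, vector_db, and qa_chain below are assumptions for illustration only; they do not appear in this diff.

# Hypothetical wiring for the new buttons; all names commented as "assumed"
# are illustrative and not taken from this commit.
db_btn.click(
    fn=initialize_database,   # assumed handler that splits the PDFs and builds the vector DB
    inputs=[document, slider_chunk_size, slider_chunk_overlap],
    outputs=[vector_db, db_progress],
)
qachain_btn.click(
    fn=initialize_LLM,        # assumed handler that loads the LLM and builds the QA chain
    inputs=[llm_btn, slider_temperature, slider_maxtokens, slider_topk, vector_db],
    outputs=[qa_chain, llm_progress],
)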