Update app.py
app.py CHANGED
@@ -8,7 +8,7 @@ import re
 import json
 from auditqa.sample_questions import QUESTIONS
 from auditqa.reports import POSSIBLE_REPORTS
-from auditqa.engine.prompts import audience_prompts, answer_prompt_template
+from auditqa.engine.prompts import audience_prompts, answer_prompt_template, llama3_prompt_template, system_propmt, user_propmt
 from auditqa.doc_process import process_pdf
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.output_parsers import StrOutputParser
@@ -71,7 +71,8 @@ async def chat(query,history,sources,reports):
 
 
     # get prompt
-
+
+    prompt = ChatPromptTemplate.from_template(llama3_prompt_template.format(system_propmt=system_propmt,user_propmt=user_propmt))
 
     # get llm
     llm_qa = HuggingFaceEndpoint(
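Net effect of the change: instead of leaving the "# get prompt" step empty, chat() now builds its prompt by filling a Llama-3-style template (llama3_prompt_template) with the system and user prompt strings (system_propmt, user_propmt, spelled as in the repository) imported from auditqa.engine.prompts, then wrapping the result in ChatPromptTemplate.from_template. The sketch below illustrates that pattern; since auditqa/engine/prompts.py is not part of this diff, the three template strings are illustrative assumptions, and only the from_template(... .format(...)) construction is taken from the actual change.

# Minimal sketch, assuming template shapes: the real llama3_prompt_template,
# system_propmt and user_propmt live in auditqa/engine/prompts.py, which this
# diff does not show, so the strings below are illustrative stand-ins only.
from langchain_core.prompts import ChatPromptTemplate

# Hypothetical Llama-3 chat scaffold with {system_propmt} and {user_propmt} slots.
llama3_prompt_template = (
    "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
    "{system_propmt}<|eot_id|>"
    "<|start_header_id|>user<|end_header_id|>\n\n"
    "{user_propmt}<|eot_id|>"
    "<|start_header_id|>assistant<|end_header_id|>\n\n"
)
# Hypothetical prompt bodies; {context} and {question} are left for query time.
system_propmt = "You answer questions about audit reports using only the given context."
user_propmt = "Context:\n{context}\n\nQuestion: {question}"

# str.format fills the system/user slots now; the surviving {context} and
# {question} placeholders become the ChatPromptTemplate's input variables.
prompt = ChatPromptTemplate.from_template(
    llama3_prompt_template.format(system_propmt=system_propmt, user_propmt=user_propmt)
)

print(prompt.input_variables)  # ['context', 'question']
print(prompt.format_messages(context="Report text here.",
                             question="What did the audit find?")[0].content[:120])

In the surrounding code the resulting prompt is presumably chained with the HuggingFaceEndpoint LLM (llm_qa) created just below and parsed with the imported StrOutputParser, so the remaining placeholders are filled per query.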