Update app.py
app.py CHANGED

@@ -75,11 +75,11 @@ async def chat(query,history,sources,reports):
     prompt = ChatPromptTemplate.from_template(llama3_prompt_template.format(system_prompt=system_propmt,user_prompt=user_propmt))
 
     # get llm
-
-
-
-
-
+    # llm_qa = HuggingFaceEndpoint(
+    #     endpoint_url= "https://mnczdhmrf7lkfd9d.eu-west-1.aws.endpoints.huggingface.cloud",
+    #     task="text-generation",
+    #     huggingfacehub_api_token=HF_token,
+    #     model_kwargs={})
 
     # trying llm new-prompt adapted for llama-3
     # https://stackoverflow.com/questions/78429932/langchain-ollama-and-llama-3-prompt-and-response
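For context, a minimal sketch of how the pieces touched by this commit likely fit together. llama3_prompt_template, system_propmt, user_propmt, and HF_token are defined elsewhere in app.py, so the values below are hypothetical stand-ins; the template string follows the Llama-3 instruct format described in the linked Stack Overflow thread, while the endpoint URL and keyword arguments are copied verbatim from the commented-out lines. This is a sketch of the call shape, not the repo's actual configuration.

import os

from langchain_core.prompts import ChatPromptTemplate
from langchain_community.llms import HuggingFaceEndpoint

# Hypothetical stand-ins for names defined elsewhere in app.py.
system_propmt = "You are an assistant answering from the given context: {context}"
user_propmt = "Question: {question}"
HF_token = os.environ.get("HF_TOKEN")  # assumed to come from the Space secrets

# Llama-3 instruct format (per the linked Stack Overflow thread):
# special header tokens wrap the system and user turns.
llama3_prompt_template = (
    "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
    "{system_prompt}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n"
    "{user_prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
)

# str.format fills only {system_prompt}/{user_prompt}; any {context}/{question}
# placeholders inside those strings survive and become the template's inputs.
prompt = ChatPromptTemplate.from_template(
    llama3_prompt_template.format(system_prompt=system_propmt, user_prompt=user_propmt)
)

# The dedicated inference endpoint this commit comments out, shown here only
# to illustrate the call shape; URL and parameters are from the diff above.
llm_qa = HuggingFaceEndpoint(
    endpoint_url="https://mnczdhmrf7lkfd9d.eu-west-1.aws.endpoints.huggingface.cloud",
    task="text-generation",
    huggingfacehub_api_token=HF_token,
    model_kwargs={},
)

chain = prompt | llm_qa  # e.g. chain.invoke({"context": ..., "question": ...})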