Update backend/query_llm.py
backend/query_llm.py (+2 -1)
@@ -53,13 +53,14 @@ def format_prompt(message: str, api_kind: str):
 
     # Create a list of message dictionaries with role and content
     messages1: List[Dict[str, Any]] = [{'role': 'user', 'content': message}]
-    messages2: List[Dict[str, Any]] = [{'role': 'user', '
+    messages2: List[Dict[str, Any]] = [{'role': 'user', 'parts': message}]
 
     if api_kind == "openai":
         return messages1
     elif api_kind == "hf":
         return tokenizer.apply_chat_template(messages, tokenize=False)
     elif api_kind=="gemini":
+        print(messages2)
         return messages2
     else:
         raise ValueError("API is not supported")
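For readers reconstructing the function from this hunk alone, below is a runnable sketch of format_prompt as it stands after the commit. It is a sketch under assumptions, not the authoritative file: the imports and the tokenizer instance live outside the hunk, so the checkpoint name used here is a placeholder, and the hf branch in the hunk references an undefined name "messages", which is presumed to be a typo for messages1.

from typing import Any, Dict, List

from transformers import AutoTokenizer

# Assumption: the real module constructs its tokenizer elsewhere; this
# checkpoint name is a placeholder, not taken from the commit.
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")


def format_prompt(message: str, api_kind: str):
    """Wrap a user message in the payload shape each backend expects."""
    # OpenAI-style chat messages carry the text under a 'content' key...
    messages1: List[Dict[str, Any]] = [{'role': 'user', 'content': message}]
    # ...while Gemini requests carry it under a 'parts' key, which is
    # the change this commit makes.
    messages2: List[Dict[str, Any]] = [{'role': 'user', 'parts': message}]

    if api_kind == "openai":
        return messages1
    elif api_kind == "hf":
        # The hunk reads apply_chat_template(messages, ...); "messages" is
        # undefined in the code shown, so messages1 is assumed here.
        return tokenizer.apply_chat_template(messages1, tokenize=False)
    elif api_kind == "gemini":
        # Debug print added by this commit.
        print(messages2)
        return messages2
    else:
        raise ValueError("API is not supported")

For example, format_prompt("Hello", "gemini") prints and returns [{'role': 'user', 'parts': 'Hello'}], matching the payload shape the commit introduces for the Gemini branch.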