@@ -989,25 +989,24 @@ async def process_chat_payload(request, form_data, user, metadata, model):
     if prompt is None:
         raise Exception("No user message found")

-    if context_string == "":
-        if request.app.state.config.RELEVANCE_THRESHOLD == 0:
-            log.debug(
-                f"With a 0 relevancy threshold for RAG, the context cannot be empty"
-            )
-    else:
+    if context_string != "":
         # Workaround for Ollama 2.0+ system prompt issue
         # TODO: replace with add_or_update_system_message
         if model.get("owned_by") == "ollama":
             form_data["messages"] = prepend_to_first_user_message_content(
                 rag_template(
-                    request.app.state.config.RAG_TEMPLATE, context_string, prompt
+                    request.app.state.config.RAG_TEMPLATE,
+                    context_string,
+                    prompt,
                 ),
                 form_data["messages"],
             )
         else:
             form_data["messages"] = add_or_update_system_message(
                 rag_template(
-                    request.app.state.config.RAG_TEMPLATE, context_string, prompt
+                    request.app.state.config.RAG_TEMPLATE,
+                    context_string,
+                    prompt,
                 ),
                 form_data["messages"],
             )