@@ -900,13 +900,13 @@ async def process_chat_payload(request, form_data, user, metadata, model):
         citated_file_idx = {}
         for _, source in enumerate(sources, 1):
             if "document" in source:
-                for doc_context, doc_meta in zip(source["document"], source['metadata']):
-                    file_id = doc_meta.get('file_id')
+                for doc_context, doc_meta in zip(
+                    source["document"], source["metadata"]
+                ):
+                    file_id = doc_meta.get("file_id")
                     if file_id not in citated_file_idx:
                         citated_file_idx[file_id] = len(citated_file_idx) + 1
-                    context_string += (
-                        f'<source id="{citated_file_idx[file_id]}">{doc_context}</source>\n'
-                    )
+                    context_string += f'<source id="{citated_file_idx[file_id]}">{doc_context}</source>\n'

         context_string = context_string.strip()
         prompt = get_last_user_message(form_data["messages"])
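The first hunk is a pure reformat: the `zip(...)` call is wrapped, the redundant parentheses around the f-string are dropped, and single quotes become double quotes; the citation logic is untouched. As a sanity check, here is a self-contained sketch of that logic with made-up `sources` data (the structure is assumed for illustration, not taken from the project's fixtures):

```python
# Illustrative only: fake `sources` shaped like the retrieval output the loop
# expects (parallel "document" and "metadata" lists per source).
sources = [
    {
        "document": ["chunk A", "chunk B"],
        "metadata": [{"file_id": "f1"}, {"file_id": "f1"}],
    },
    {"document": ["chunk C"], "metadata": [{"file_id": "f2"}]},
]

context_string = ""
citated_file_idx = {}  # file_id -> stable 1-based citation index

for _, source in enumerate(sources, 1):
    if "document" in source:
        for doc_context, doc_meta in zip(source["document"], source["metadata"]):
            file_id = doc_meta.get("file_id")
            if file_id not in citated_file_idx:
                citated_file_idx[file_id] = len(citated_file_idx) + 1
            context_string += (
                f'<source id="{citated_file_idx[file_id]}">{doc_context}</source>\n'
            )

print(context_string.strip())
# Chunks A and B share file f1, so both are tagged id="1"; chunk C gets id="2".
```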
@@ -1613,6 +1613,9 @@ async def process_chat_response(
                         )

                     if data:
+                        if "event" in data:
+                            await event_emitter(data.get("event", {}))
+
                         if "selected_model_id" in data:
                             model_id = data["selected_model_id"]
                             Chats.upsert_message_to_chat_by_id_and_message_id(
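The second hunk relays any `event` object embedded in a streamed chunk to the client through `event_emitter` before the existing `selected_model_id` handling runs. A rough, self-contained sketch of that flow, with a stand-in emitter and an assumed chunk shape (neither is the project's real definition):

```python
# Hypothetical stand-ins for illustration; only the "event" branch mirrors the diff.
import asyncio


async def event_emitter(event: dict) -> None:
    # The real emitter pushes the event to the UI over the socket; here we just print.
    print("emit ->", event)


async def handle_chunk(data: dict) -> None:
    if data:
        # New in this diff: forward embedded events to the client as-is.
        if "event" in data:
            await event_emitter(data.get("event", {}))

        if "selected_model_id" in data:
            pass  # existing model-selection persistence, unchanged


asyncio.run(
    handle_chunk({"event": {"type": "status", "data": {"description": "Searching..."}}})
)
```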