@@ -342,9 +342,9 @@ async def model_response_handler(request, channel, message, user):
             system_message = {
                 "role": "system",
-                "content": f"You are {model.get('name', model_id)}, an AI assistant participating in a threaded conversation. Be helpful, concise, and conversational."
+                "content": f"You are {model.get('name', model_id)}, participating in a threaded conversation. Be concise and conversational."
                 + (
-                    f"Here's the thread history:\n\n{''.join([f'{msg}' for msg in thread_history])}\n\nContinue the conversation naturally, addressing the most recent message while being aware of the full context."
+                    f"Here's the thread history:\n\n{''.join([f'{msg}' for msg in thread_history])}\n\nContinue the conversation naturally as {model.get('name', model_id)}, addressing the most recent message while being aware of the full context."
                     if thread_history
                     else ""
                 ),
             }