
fix llama 3.2 issue where apply_chat_template assumes messages is a list of dicts; fixes #239

Alex Cheema, 7 months ago
commit 2ebcf5f407
1 changed file with 1 addition and 1 deletion

+ 1 - 1
exo/api/chatgpt_api.py

@@ -115,7 +115,7 @@ def remap_messages(messages: List[Message]) -> List[Message]:
 
 def build_prompt(tokenizer, _messages: List[Message]):
   messages = remap_messages(_messages)
-  prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+  prompt = tokenizer.apply_chat_template([m.to_dict() for m in messages], tokenize=False, add_generation_prompt=True)
   image_str = None
   for message in messages:
     if not isinstance(message.content, list):
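
For context, here is a minimal sketch of why the conversion matters. The names below (the `Message` dataclass fields and its `to_dict` method) are assumptions modeled on the diff, not the exact exo implementation: `apply_chat_template` expects plain dicts with `role`/`content` keys, so passing `Message` objects directly can break chat templates such as llama 3.2's.

```python
# Sketch only: assumed shape of exo's Message, based on the diff above.
from dataclasses import dataclass
from typing import Any, List, Union

@dataclass
class Message:
  role: str
  content: Union[str, List[Any]]

  def to_dict(self) -> dict:
    # apply_chat_template indexes each message like a dict, e.g. message["role"].
    return {"role": self.role, "content": self.content}

def build_prompt(tokenizer, messages: List[Message]) -> str:
  # Convert Message objects to dicts before handing them to the template,
  # which is the one-line change made in this commit.
  return tokenizer.apply_chat_template(
    [m.to_dict() for m in messages],
    tokenize=False,
    add_generation_prompt=True,
  )
```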