Browse Source

Fix Llama 3.2 issue where apply_chat_template assumed messages is a list of dicts; convert each Message with to_dict() before passing it in. Fixes #239

Alex Cheema 7 months ago
parent
commit
2ebcf5f407
1 changed files with 1 additions and 1 deletions
  1. 1 1
      exo/api/chatgpt_api.py

+ 1 - 1
exo/api/chatgpt_api.py

@@ -115,7 +115,7 @@ def remap_messages(messages: List[Message]) -> List[Message]:
 
 def build_prompt(tokenizer, _messages: List[Message]):
   messages = remap_messages(_messages)
-  prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+  prompt = tokenizer.apply_chat_template([m.to_dict() for m in messages], tokenize=False, add_generation_prompt=True)
   image_str = None
   for message in messages:
     if not isinstance(message.content, list):