It seems like `apply_chat_template` doesn't work:
inputs = giga10b18b_tokenizer.apply_chat_template(
message,
add_generation_prompt=True,
tokenize=True,
return_dict=True,
return_tensors="pt",
)
It raises the following error (while other tokenizers work fine on the same input):
ValueError Traceback (most recent call last)
Cell In[36], line 1
----> 1 inputs = giga10b18b_tokenizer.apply_chat_template(
2 message,
3 add_generation_prompt=True,
4 tokenize=True,
5 return_dict=True,
6 return_tensors="pt",
7 )
File /opt/conda/lib/python3.11/site-packages/transformers/tokenization_utils_base.py:1640, in PreTrainedTokenizerBase.apply_chat_template(self, conversation, tools, documents, chat_template, add_generation_prompt, continue_final_message, tokenize, padding, truncation, max_length, return_tensors, return_dict, return_assistant_tokens_mask, tokenizer_kwargs, **kwargs)
1637 raise ValueError("continue_final_message is not compatible with return_assistant_tokens_mask.")
1639 template_kwargs = {**self.special_tokens_map, **kwargs} # kwargs overwrite special tokens if both are present
-> 1640 rendered_chat, generation_indices = render_jinja_template(
1641 conversations=conversations,
1642 tools=tools,
1643 documents=documents,
1644 chat_template=chat_template,
1645 return_assistant_tokens_mask=return_assistant_tokens_mask,
1646 continue_final_message=continue_final_message,
1647 add_generation_prompt=add_generation_prompt,
1648 **template_kwargs,
1649 )
1651 if not is_batched:
1652 rendered_chat = rendered_chat[0]
File /opt/conda/lib/python3.11/site-packages/transformers/utils/chat_template_utils.py:521, in render_jinja_template(conversations, tools, documents, chat_template, return_assistant_tokens_mask, continue_final_message, add_generation_prompt, **kwargs)
519 all_generation_indices.append(generation_indices)
520 else:
--> 521 rendered_chat = compiled_template.render(
522 messages=chat,
523 tools=tool_schemas,
524 documents=documents,
525 add_generation_prompt=add_generation_prompt,
526 **kwargs,
527 )
528 if continue_final_message:
529 final_message = chat[-1]["content"]
File /opt/conda/lib/python3.11/site-packages/jinja2/environment.py:1295, in Template.render(self, *args, **kwargs)
1293 return self.environment.concat(self.root_render_func(ctx)) # type: ignore
1294 except Exception:
-> 1295 self.environment.handle_exception()
File /opt/conda/lib/python3.11/site-packages/jinja2/environment.py:942, in Environment.handle_exception(self, source)
937 """Exception handling helper. This is used internally to either raise
938 rewritten exceptions or return a rendered traceback for the template.
939 """
940 from .debug import rewrite_traceback_stack
--> 942 raise rewrite_traceback_stack(source=source)
File :306, in top-level template code()
ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
Could you please provide:
- the whole script showing how you instantiate the tokenizer and apply the chat template
- the version of transformers you are using
- the full error log file
In the meantime, try passing `messages` as a `List[Dict]` rather than as an `np.ndarray` — the `ValueError: The truth value of an array with more than one element is ambiguous` suggests the chat template is receiving NumPy arrays where plain Python lists/dicts are expected.