"""Minimal reproduction script.

Triggers the exception raised by ``WhisperTokenizerFast.batch_decode`` when
the token sequence is nested one level too deep (a list of lists of lists,
where ``batch_decode`` expects a list of token-id lists) with
``decode_with_timestamps=True``.

Requires the third-party ``transformers`` package and network access to
download the tokenizer on first run.
"""
from transformers import WhisperTokenizerFast

tokenizer = WhisperTokenizerFast.from_pretrained("openai/whisper-large-v3")

# NOTE(review): the extra level of nesting here is the point of the repro —
# each "sequence" is itself a list of lists rather than a flat list of ids.
seq = [[[50258, 50259, 50359, 50363]]]

try:
    tokenizer.batch_decode(seq, skip_special_tokens=False, decode_with_timestamps=True)
except Exception:  # deliberately broad: we only want to print the repro traceback
    import traceback

    traceback.print_exc()