from dataclasses import dataclass
from typing import Any, Dict, List

import torch


@dataclass
class MyCollator:
    pad_token_id: int
    attention_pad_value: int = 0
    label_pad_value: int = -100

    def __call__(self, features: List[Dict[str, Any]]) -> Dict[str, Any]:
        # Gather each per-feature field into a batch-level list.
        input_ids = [f["input_ids"] for f in features]
        placeholder_indices = [f["placeholder_indices"] for f in features]
        super_input_ids = [f["super_input_ids"] for f in features]
        super_token_indices = [f["super_token_indices"] for f in features]
        labels = [f["labels"] for f in features] if "labels" in features[0] else None

        # Left-pad the model inputs and shift the placeholder positions accordingly.
        (
            input_ids,
            attention_mask,
            placeholder_indices,
            labels,
        ) = self.process_model_inputs(
            input_ids,
            placeholder_indices,
            labels,
        )

        # Flatten, filter, and right-pad the super-tokenizer inputs.
        (
            super_input_ids,
            super_attention_mask,
            super_token_indices,
        ) = self.process_super_tokenizer_inputs(
            super_input_ids,
            super_token_indices,
        )

        input_ids = torch.tensor(input_ids)
        attention_mask = torch.tensor(attention_mask)
        super_input_ids = torch.tensor(super_input_ids)
        super_attention_mask = torch.tensor(super_attention_mask)
        labels = torch.tensor(labels) if labels is not None else None

        res = {
            "input_ids": input_ids,
            "attention_mask": attention_mask,
            "super_input_ids": super_input_ids,
            "super_attention_mask": super_attention_mask,
            "placeholder_indices": placeholder_indices,
            "super_token_indices": super_token_indices,
            "labels": labels,
        }

        return res

    def process_model_inputs(self, input_ids, placeholder_indices, labels):
        max_len = get_max_length_in_nested_lists(input_ids)
        attention_mask = get_attention_mask_from_nested_lists(input_ids)

        # Left padding shifts every token rightward, so each placeholder index
        # must be offset by the number of pad tokens added to its sequence.
        placeholder_indices = [
            [idx + max_len - len(input_ids[i]) for idx in placeholder_indices[i]]
            for i in range(len(placeholder_indices))
        ]

        input_ids = pad_nested_lists(input_ids, max_len, self.pad_token_id, "left")
        attention_mask = pad_nested_lists(attention_mask, max_len, self.attention_pad_value, "left")
        if labels is not None:
            labels = pad_nested_lists(labels, max_len, self.label_pad_value, "left")

        return input_ids, attention_mask, placeholder_indices, labels
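
    # Illustrative example of the shift above (values are assumed, not taken
    # from the original): with input_ids [[7, 8, 9], [7, 8]] and
    # placeholder_indices [[1], [0]], max_len is 3; the shorter sequence gains
    # one left pad, so its placeholder index moves from 0 to 1, while the
    # full-length sequence keeps index 1.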

    def process_super_tokenizer_inputs(self, input_ids, super_token_indices):
        # Flatten the per-feature lists of sequences into one batch-level list.
        input_ids = sum(input_ids, [])
        super_token_indices = sum(super_token_indices, [])

        # Keep only the sequences that actually contain super tokens.
        new_input_ids = []
        new_super_token_indices = []
        for ids, indices in zip(input_ids, super_token_indices):
            if len(indices) != 0:
                new_input_ids.append(ids)
                new_super_token_indices.append(indices)
        input_ids = new_input_ids
        super_token_indices = new_super_token_indices

        if len(input_ids) == 0:
            return [], [], []

        max_len = get_max_length_in_nested_lists(input_ids)
        attention_mask = get_attention_mask_from_nested_lists(input_ids)

        # Super-tokenizer inputs are right-padded (the helper's default).
        input_ids = pad_nested_lists(input_ids, max_len, self.pad_token_id)
        attention_mask = pad_nested_lists(attention_mask, max_len, self.attention_pad_value)

        return input_ids, attention_mask, super_token_indices
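
    # Illustrative example of the flatten-and-filter step above (values are
    # assumed): super_input_ids [[[5, 6]], [[7], [8, 9]]] flattens to
    # [[5, 6], [7], [8, 9]]; if the flattened super_token_indices are
    # [[0], [], [1]], the middle sequence is dropped and the survivors are
    # right-padded to the batch maximum of 2.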


def get_max_length_in_nested_lists(lst):
    """Return the length of the longest innermost list in a nested list."""
    if isinstance(lst[0], list):
        return max(get_max_length_in_nested_lists(elem) for elem in lst)
    return len(lst)
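
# Illustrative example (values assumed):
# get_max_length_in_nested_lists([[1, 2], [3, 4, 5]]) returns 3; with deeper
# nesting such as [[[1], [2, 3]]] it returns the innermost maximum, here 2.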


def get_attention_mask_from_nested_lists(lst):
    """Build a mask of 1s that mirrors the structure of the unpadded lists."""
    if isinstance(lst[0], list):
        return [get_attention_mask_from_nested_lists(elem) for elem in lst]
    return [1] * len(lst)
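
# Illustrative example (values assumed):
# get_attention_mask_from_nested_lists([[5, 6], [7]]) returns [[1, 1], [1]],
# marking every real (pre-padding) token position with a 1.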


def pad_nested_lists(lst, max_length, padding_value, padding_side="right"):
    """Pad every innermost list to max_length on the given side."""
    if isinstance(lst, list) and len(lst) and isinstance(lst[0], list):
        for i, elem in enumerate(lst):
            lst[i] = pad_nested_lists(elem, max_length, padding_value, padding_side)
        return lst
    elif isinstance(lst, list):
        if padding_side == "right":
            return lst + [padding_value] * (max_length - len(lst))
        else:
            return [padding_value] * (max_length - len(lst)) + lst
    else:
        raise NotImplementedError(f"Unrecognized type: {type(lst)}")
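
# Illustrative example (values assumed): pad_nested_lists([[1], [2, 3]], 3, 0)
# returns [[1, 0, 0], [2, 3, 0]]; padding_side="left" would give
# [[0, 0, 1], [0, 2, 3]]. Innermost lists are replaced by new padded lists,
# while outer lists are updated in place.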
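

if __name__ == "__main__":
    # Hypothetical usage sketch, not part of the original module: the token
    # ids, placeholder positions, and pad_token_id below are illustrative
    # assumptions chosen only to exercise the collator end to end.
    collator = MyCollator(pad_token_id=0)
    features = [
        {
            "input_ids": [101, 7, 8, 102],
            "placeholder_indices": [2],
            "super_input_ids": [[5, 6, 7]],
            "super_token_indices": [[1]],
        },
        {
            "input_ids": [101, 9, 102],
            "placeholder_indices": [1],
            "super_input_ids": [[5, 6]],
            "super_token_indices": [[0, 1]],
        },
    ]
    batch = collator(features)
    print(batch["input_ids"].shape)        # torch.Size([2, 4]) -- left-padded
    print(batch["super_input_ids"].shape)  # torch.Size([2, 3]) -- right-padded
    print(batch["placeholder_indices"])    # [[2], [2]] -- shifted by left padding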