bigmoyan committed
Commit 2755962 · Parent: 9e6eed7

support-transformers-inference (#26)


- fix for transformers inference (baded737743fc2c5170986e6e6765b603a086d33)

Files changed (3)
  1. config.json +4 -2
  2. modeling_deepseek.py +1 -1
  3. modeling_kimi_k25.py +3 -18
config.json CHANGED
@@ -116,10 +116,12 @@
     },
     "format": "pack-quantized",
     "ignore": [
-      "lm_head",
       "re:.*self_attn.*",
       "re:.*shared_experts.*",
-      "re:.*mlp\\.(gate|up|gate_up|down)_proj.*"
+      "re:.*mlp\\.(gate|up|gate_up|down)_proj.*",
+      "re:.*lm_head.*",
+      "re:vision_tower.*",
+      "re:mm_projector.*"
     ],
     "kv_cache_scheme": null,
     "quant_method": "compressed-tensors",
modeling_deepseek.py CHANGED
@@ -1244,7 +1244,7 @@ class DeepseekV3PreTrainedModel(PreTrainedModel):
 
     def _init_weights(self, module):
         std = self.config.initializer_range
-        if isinstance(module, nn.Linear) and hasattr(module, "weight"):
+        if isinstance(module, nn.Linear):
             module.weight.data.normal_(mean=0.0, std=std)
             if module.bias is not None:
                 module.bias.data.zero_()
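
For context, the dropped `hasattr` guard protected against `nn.Linear` subclasses whose dense `weight` disappears after compression; the comment removed from modeling_kimi_k25.py below names compressed_tensors.CompressedLinear. A toy reproduction of that edge case (`PackedLinear` is a hypothetical stand-in, not the real class):

```python
import torch.nn as nn

class PackedLinear(nn.Linear):
    """Hypothetical stand-in for a compressed layer: an nn.Linear
    subclass whose dense `weight` parameter has been removed."""
    def __init__(self, in_features: int, out_features: int):
        super().__init__(in_features, out_features)
        del self.weight  # packed tensors would live elsewhere

m = PackedLinear(8, 8)
print(isinstance(m, nn.Linear))  # True  -> matched by the simplified check
print(hasattr(m, "weight"))      # False -> the old guard skipped it
# Without the guard, `m.weight.data.normal_(...)` raises AttributeError,
# so this change assumes such modules no longer reach _init_weights on
# the transformers inference path.
```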
modeling_kimi_k25.py CHANGED
@@ -571,9 +571,8 @@ class MoonViT3dEncoder(nn.Module):
         self.rope_2d = Rope2DPosEmbRepeated(
             block_cfg['hidden_dim'] // block_cfg['num_heads'], 512, 512)
         self.blocks = nn.ModuleList([
-            MoonViTEncoderLayer(
-                **block_cfg,
-                use_deterministic_attn=use_deterministic_attn)
+            MoonViTEncoderLayer(**block_cfg,
+                                use_deterministic_attn=use_deterministic_attn)
             for _ in range(num_layers)
         ])
         self.final_layernorm = nn.LayerNorm(hidden_dim)
@@ -638,20 +637,6 @@ class MoonViT3dPretrainedModel(PreTrainedModel):
     _no_split_modules = ['PackingTransformer']
     _supports_flash_attn_2 = True
     _supports_sdpa = True
-
-    def _init_weights(self, module):
-        # Default PreTrainedModel._init_weights treats nn.Linear subclasses (e.g.
-        # compressed_tensors.CompressedLinear) as Linear but those layers may have
-        # no `weight` after compression; skip in that case.
-        std = getattr(self.config, "initializer_range", 0.02)
-        if isinstance(module, (nn.Linear, nn.Conv2d)) and hasattr(module, "weight"):
-            module.weight.data.normal_(mean=0.0, std=std)
-            if module.bias is not None:
-                module.bias.data.zero_()
-        elif isinstance(module, nn.Embedding):
-            module.weight.data.normal_(mean=0.0, std=std)
-            if module.padding_idx is not None:
-                module.weight.data[module.padding_idx].zero_()
 
     def __init__(self, config, *inputs, **kwargs):
         super().__init__(config, *inputs, **kwargs)
@@ -800,7 +785,7 @@ class KimiK25PreTrainedModel(PreTrainedModel):
         if hasattr(module, "class_embedding"):
             module.class_embedding.data.normal_(mean=0.0, std=std)
 
-        if isinstance(module, (nn.Linear, nn.Conv2d)) and hasattr(module, "weight"):
+        if isinstance(module, (nn.Linear, nn.Conv2d)):
             module.weight.data.normal_(mean=0.0, std=std)
             if module.bias is not None:
                 module.bias.data.zero_()
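
Taken together, the three changes line the checkpoint up with a stock transformers load. A hedged usage sketch, assuming the repo's auto_map exposes AutoModelForCausalLM; the model path is a placeholder:

```python
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "path/to/this-checkpoint",   # placeholder; use this repo's model id
    trust_remote_code=True,      # pulls in the custom modeling_*.py files
    torch_dtype="auto",          # take dtypes from the checkpoint config
    device_map="auto",           # requires `accelerate`
)
model.eval()
```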