kashif committed
Commit 52747ba · verified · Parent: bbb5715

fix: use linear rope_type instead of removed default for transformers compat


The "default" rope_type was removed in recent transformers versions. Using "linear" (with no scaling factor) is functionally equivalent.

Files changed (1)
  1. modeling_llada2_moe.py +1 -1
modeling_llada2_moe.py CHANGED
@@ -100,7 +100,7 @@ class LLaDA2MoeRotaryEmbedding(nn.Module):
                 "rope_type", config.rope_scaling.get("type")
             )
         else:
-            self.rope_type = "default"
+            self.rope_type = "linear"
         self.max_seq_len_cached = config.max_position_embeddings
         self.original_max_seq_len = config.max_position_embeddings
 
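For completeness, a quick way to confirm which rope_type strings the installed transformers release accepts, assuming it exposes the ROPE_INIT_FUNCTIONS registry that recent versions use to dispatch RoPE initialization:

# Sanity check (not part of this commit): list the accepted rope_type
# keys. Assumes a recent transformers release exposing this registry.
from transformers.modeling_rope_utils import ROPE_INIT_FUNCTIONS

print(sorted(ROPE_INIT_FUNCTIONS))  # e.g. ['dynamic', 'linear', 'llama3', ...]
assert "linear" in ROPE_INIT_FUNCTIONS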