fix: use linear rope_type instead of removed default for transformers compat
The "default" rope_type was removed in recent transformers versions. Using "linear" (with no scaling factor) is functionally equivalent.
- modeling_llada2_moe.py +1 -1
modeling_llada2_moe.py
CHANGED
|
@@ -100,7 +100,7 @@ class LLaDA2MoeRotaryEmbedding(nn.Module):
|
|
| 100 |
"rope_type", config.rope_scaling.get("type")
|
| 101 |
)
|
| 102 |
else:
|
| 103 |
-                self.rope_type = "default"
|
| 104 |
self.max_seq_len_cached = config.max_position_embeddings
|
| 105 |
self.original_max_seq_len = config.max_position_embeddings
|
| 106 |
|
|
|
|
| 100 |
"rope_type", config.rope_scaling.get("type")
|
| 101 |
)
|
| 102 |
else:
|
| 103 |
+                self.rope_type = "linear"
|
| 104 |
self.max_seq_len_cached = config.max_position_embeddings
|
| 105 |
self.original_max_seq_len = config.max_position_embeddings
|
| 106 |
|