# Geodesic-Phantom-12B / mergekit_config.yml
# Uploaded by OrobasVault using huggingface_hub (commit 4edccb2, verified; 684 bytes)
---
# Mergekit recipe: karcher_stock (v8) merge of six Vortex5 12B models
# on the Mistral-Nemo-Instruct-2407 base. Weights are merged in float32
# and written out as bfloat16.
architecture: MistralForCausalLM
base_model: /workspace/models/mistralai--Mistral-Nemo-Instruct-2407

# Donor models folded into the base.
models:
  - model: /workspace/models/Vortex5--Prototype-X-12b
  - model: /workspace/models/Vortex5--Celestial-Queen-12B
  - model: /workspace/models/Vortex5--Wicked-Nebula-12B
  - model: /workspace/models/Vortex5--Stellar-Witch-12B
  - model: /workspace/models/Vortex5--Moonlit-Mirage-12B
  - model: /workspace/models/Vortex5--Crimson-Constellation-12B

merge_method: karcher_stock  # v8
parameters:
  filter_wise: true
  # Karcher-mean iteration bounds and convergence tolerance.
  max_iter: 10000
  min_iter: 1000
  tol: 1.0e-11

dtype: float32        # working precision during the merge
out_dtype: bfloat16   # precision of the saved weights

tokenizer:
  source: union
chat_template: auto
name: "👻 Geodesic Phantom 12B"