# File size: 684 Bytes
# 4edccb2
---
# mergekit configuration: merges six Vortex5 12B fine-tunes of
# Mistral-Nemo-Instruct-2407 into "Geodesic Phantom 12B" using the
# karcher_stock merge method.
architecture: MistralForCausalLM
base_model: /workspace/models/mistralai--Mistral-Nemo-Instruct-2407

# Input models to merge (all local paths; all 12B Mistral-Nemo derivatives).
models:
  - model: /workspace/models/Vortex5--Prototype-X-12b
  - model: /workspace/models/Vortex5--Celestial-Queen-12B
  - model: /workspace/models/Vortex5--Wicked-Nebula-12B
  - model: /workspace/models/Vortex5--Stellar-Witch-12B
  - model: /workspace/models/Vortex5--Moonlit-Mirage-12B
  - model: /workspace/models/Vortex5--Crimson-Constellation-12B

merge_method: karcher_stock  # v8
parameters:
  # NOTE(review): presumably applies the Karcher averaging per tensor/filter
  # rather than globally — confirm against mergekit's method docs.
  filter_wise: true
  max_iter: 10000   # upper bound on solver iterations
  min_iter: 1000    # lower bound on solver iterations
  tol: 1.0e-11      # convergence tolerance for the iterative solver

dtype: float32       # working precision during the merge
out_dtype: bfloat16  # precision of the saved output weights

tokenizer:
  source: union      # combine vocabularies from all input tokenizers

chat_template: auto
name: 👻 Geodesic Phantom 12B