---
# mergekit configuration: DARE-TIES merge of ten GPT-2 fine-tune experiments.
# The original file was collapsed onto one line, which is invalid YAML
# (multiple ": " mapping indicators inside a plain scalar); reconstructed
# here in block style per the mergekit config schema.

# Model whose weights serve as the base for the delta-based DARE-TIES merge.
base_model:
  model:
    path: mllm-dev/gpt2_f_experiment_0

# Output tensor dtype for the merged checkpoint.
dtype: float16

merge_method: dare_ties

# Global merge parameters; normalize rescales the summed task vectors.
parameters:
  normalize: 1.0

slices:
  - sources:
      # Base model contributes its weights directly (no density/weight —
      # it is the reference point the other models' deltas are taken against).
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_0
      # Nine fine-tuned variants, each merged with 90% of delta parameters
      # retained (density) and a 0.1 contribution weight.
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_1
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_2
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_3
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_4
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_5
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_6
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_7
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_8
        parameters:
          density: 0.9
          weight: 0.1
      - layer_range: [0, 12]
        model:
          model:
            path: mllm-dev/gpt2_f_experiment_9
        parameters:
          density: 0.9
          weight: 0.1