Single fp16 safetensors file?

#1
by pikkaa - opened

Can I merge this into a single safetensors file, without needing those sharded config/index files?

from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Re-save a sharded checkpoint as a single fp16 safetensors file.
# The model is loaded entirely on CPU so no GPU memory is required.
model_id = "huihui-ai/Huihui-Qwen3-4B-Thinking-2507-abliterated"

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # cast weights to half precision
    device_map="cpu",
    trust_remote_code=True
)

output_dir = "huihui-ai/Huihui-Qwen3-4B-Thinking-2507-abliterated-single"
model.save_pretrained(
    output_dir,
    # A 4B-parameter model in fp16 is ~8 GB, which is uncomfortably close
    # to a 10 GB shard limit — metadata or a slightly larger checkpoint
    # could still split the output. Use a ceiling far above the model size
    # to guarantee exactly one model.safetensors file is written.
    max_shard_size="100GB",
    safe_serialization=True  # write safetensors, not pickle-based .bin
)
# The tokenizer files (and config.json) are still required to load the
# model with transformers; only the weight shards are merged into one file.
tokenizer.save_pretrained(output_dir)

Sign up or log in to comment