{
  "num_conversations": 3968,
  "model_config": {
    "vocab_size": 2000,
    "d_model": 256,
    "n_heads": 8,
    "n_layers": 6,
    "d_ff": 1024,
    "dropout": 0.1,
    "max_len": 512
  },
  "tokenizer_vocab_size": 2000
}