gemma300-vi-trimmed
gemma300-vi-trimmed/config.json
ADDED
@@ -0,0 +1,92 @@
+{
+  "_sliding_window_pattern": 6,
+  "architectures": [
+    "Gemma3TextModel"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "attn_logit_softcapping": null,
+  "bos_token_id": 2,
+  "dtype": "float32",
+  "eos_token_id": 1,
+  "final_logit_softcapping": null,
+  "head_dim": 256,
+  "hidden_activation": "gelu_pytorch_tanh",
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 1152,
+  "layer_types": [
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention"
+  ],
+  "max_position_embeddings": 2048,
+  "model_type": "gemma3_text",
+  "num_attention_heads": 3,
+  "num_hidden_layers": 24,
+  "num_key_value_heads": 1,
+  "pad_token_id": 0,
+  "query_pre_attn_scalar": 256,
+  "rms_norm_eps": 1e-06,
+  "rope_parameters": {
+    "full_attention": {
+      "rope_theta": 1000000.0,
+      "rope_type": "default"
+    },
+    "sliding_attention": {
+      "rope_theta": 10000.0,
+      "rope_type": "default"
+    }
+  },
+  "sliding_window": 129,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.5.3",
+  "use_bidirectional_attention": true,
+  "use_cache": true,
+  "vocab_size": 47465,
+  "vocabtrimmer": {
+    "mining_config": {
+      "dataset": [
+        "crosslingual/original/merged_queries_vi.json",
+        "crosslingual/eval/filtered_corpus.json"
+      ],
+      "dataset_column": "text",
+      "dataset_name": null,
+      "dataset_split": "validation",
+      "language": "vi",
+      "min_frequency": 1,
+      "target_vocab_size": null
+    },
+    "stats": {
+      "compression_rate_embedding": 18.106460571289062,
+      "compression_rate_full": 45.5617175474765,
+      "parameter_size_embedding/raw": 201326592,
+      "parameter_size_embedding/trimmed": 36453120,
+      "parameter_size_full/raw": 302863104,
+      "parameter_size_full/trimmed": 137989632,
+      "vocab_size/raw": 262144,
+      "vocab_size/trimmed": 47465
+    }
+  }
+}
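The `vocabtrimmer.stats` block can be cross-checked directly from the config: embedding parameters are `vocab_size * hidden_size`, and because the embeddings are tied, the non-embedding parameters stay fixed while the vocabulary shrinks from 262144 to 47465 entries. A minimal sketch of that check, assuming the `config.json` above is available at the local path used below:

```python
import json

# Minimal consistency check of the vocabtrimmer stats above
# (the local path is an assumption; adjust to where the repo was cloned).
with open("gemma300-vi-trimmed/config.json") as f:
    cfg = json.load(f)

stats = cfg["vocabtrimmer"]["stats"]
hidden = cfg["hidden_size"]                      # 768

# Embedding parameters are vocab_size * hidden_size; with tied embeddings
# the LM head reuses the same matrix and is not counted twice.
emb_raw = stats["vocab_size/raw"] * hidden       # 262144 * 768 = 201326592
emb_trim = stats["vocab_size/trimmed"] * hidden  #  47465 * 768 =  36453120
assert emb_raw == stats["parameter_size_embedding/raw"]
assert emb_trim == stats["parameter_size_embedding/trimmed"]

# Non-embedding parameters are unchanged by trimming.
body = stats["parameter_size_full/raw"] - emb_raw
assert body + emb_trim == stats["parameter_size_full/trimmed"]

# Compression rates are trimmed/raw in percent.
print(100 * emb_trim / emb_raw)                      # ~18.11
print(100 * stats["parameter_size_full/trimmed"]
          / stats["parameter_size_full/raw"])        # ~45.56
```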
gemma300-vi-trimmed/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88a53f58bf8c59f3dbbff4d449c4f0cc4e9ae21d7c73b94cf02bf1190520720f
+size 551991848
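`model.safetensors` is committed as a Git LFS pointer, so only its `oid` and `size` appear in the diff. A small sketch for confirming that a locally downloaded copy matches the pointer; the file path is an assumption, and the snippet is an illustration rather than part of the repository:

```python
import hashlib

# Compare a downloaded model.safetensors against the LFS pointer above.
path = "gemma300-vi-trimmed/model.safetensors"   # assumed local path
expected_oid = "88a53f58bf8c59f3dbbff4d449c4f0cc4e9ae21d7c73b94cf02bf1190520720f"
expected_size = 551991848

h = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == expected_size, f"size mismatch: {size}"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```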
gemma300-vi-trimmed/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
gemma300-vi-trimmed/tokenizer_config.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "backend": "tokenizers",
+  "boi_token": "<start_of_image>",
+  "bos_token": "<bos>",
+  "clean_up_tokenization_spaces": false,
+  "eoi_token": "<end_of_image>",
+  "eos_token": "<eos>",
+  "image_token": "<image_soft_token>",
+  "is_local": false,
+  "mask_token": "<mask>",
+  "model_max_length": 2048,
+  "model_specific_special_tokens": {
+    "boi_token": "<start_of_image>",
+    "eoi_token": "<end_of_image>",
+    "image_token": "<image_soft_token>"
+  },
+  "pad_token": "<pad>",
+  "padding_side": "right",
+  "sp_model_kwargs": null,
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "GemmaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
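With the config, weights, and trimmed tokenizer files in place, the folder should load as a regular local checkpoint. A minimal loading sketch, assuming a `transformers` build that supports the `gemma3_text` model type and using the folder path from this commit; the expected values in the comments come from the configs above:

```python
from transformers import AutoModel, AutoTokenizer

# Load the trimmed checkpoint from the local folder added in this commit.
path = "gemma300-vi-trimmed"
tokenizer = AutoTokenizer.from_pretrained(path)
model = AutoModel.from_pretrained(path)

# The trimmed vocabulary should line up across tokenizer, config, and embeddings.
print(len(tokenizer))                             # expected ~47465 (plus any added tokens)
print(model.config.vocab_size)                    # 47465
print(model.get_input_embeddings().weight.shape)  # (47465, 768)

# Special tokens as declared in tokenizer_config.json.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)
```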