irodkin committed
Commit dcfe48c · verified · 1 Parent(s): b664a66

Training checkpoint at step 22500

Files changed (1)
  1. config.json +39 -0
config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "act_format": "linear",
+   "act_on": false,
+   "act_type": "associative",
+   "architectures": [
+     "InnerLoopARMTForCausalLM"
+   ],
+   "attend_to_previous_input": false,
+   "attn_implementation": "flash_attention_2",
+   "base_model_config": null,
+   "base_model_name": "google/gemma-3-1b-it",
+   "constant_depth": false,
+   "correction": true,
+   "d_mem": 64,
+   "dtype": "bfloat16",
+   "freeze_mem": false,
+   "gating": false,
+   "layers_attr": "model.layers",
+   "max_hop": 4,
+   "memory_dtype": "bfloat16",
+   "model_dtype": "bfloat16",
+   "model_type": "armt",
+   "n_heads": 1,
+   "noisy_halting": false,
+   "num_mem_tokens": 32,
+   "segment_alignment": "left",
+   "segment_size": 1024,
+   "sliding_window_enabled": false,
+   "time_penalty": 0.0,
+   "transformers_version": "4.57.1",
+   "use_denom": true,
+   "use_sink": false,
+   "wrap_layers": null,
+   "wrap_pos": false,
+   "auto_map": {
+     "AutoConfig": "modeling_armt.ARMTConfig",
+     "AutoModelForCausalLM": "modeling_armt.InnerLoopARMTForCausalLM"
+   }
+ }
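
Because "auto_map" routes AutoConfig and AutoModelForCausalLM to the custom classes in modeling_armt.py, loading this checkpoint requires trust_remote_code=True. Below is a minimal loading sketch; the repo id is a placeholder for this repository (or a local checkout), and the tokenizer choice is an assumption based on "base_model_name", since no tokenizer files are part of this commit.

    # Sketch only: repo_id is a placeholder, not a confirmed path.
    import torch
    from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

    repo_id = "path/to/this-checkpoint"  # placeholder: actual repo or local dir with modeling_armt.py

    # trust_remote_code=True lets transformers import ARMTConfig and
    # InnerLoopARMTForCausalLM from modeling_armt.py, as declared in auto_map.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        repo_id,
        trust_remote_code=True,
        torch_dtype=torch.bfloat16,  # matches "dtype" / "model_dtype" in config.json
    )

    # Assumption: the base model's tokenizer is reused; not confirmed by this commit.
    tokenizer = AutoTokenizer.from_pretrained("google/gemma-3-1b-it")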