Temmp1e committed on
Commit
529f8bf
·
verified ·
1 Parent(s): db124ab

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +8 -17
config.json CHANGED
@@ -41,7 +41,7 @@
41
  "device": "cuda",
42
  "use_amp": false,
43
  "push_to_hub": true,
44
- "repo_id": "Temmp1e/pi0_grab",
45
  "private": null,
46
  "tags": null,
47
  "license": null,
@@ -50,37 +50,28 @@
50
  "max_state_dim": 32,
51
  "max_action_dim": 32,
52
  "resize_imgs_with_padding": [
53
- 512,
54
- 512
55
  ],
56
  "empty_cameras": 0,
57
  "adapt_to_pi_aloha": false,
58
  "use_delta_joint_actions_aloha": false,
59
  "tokenizer_max_length": 48,
 
60
  "num_steps": 10,
61
  "use_cache": true,
 
62
  "freeze_vision_encoder": true,
63
- "train_expert_only": true,
64
  "train_state_proj": true,
65
- "optimizer_lr": 0.0001,
66
  "optimizer_betas": [
67
  0.9,
68
  0.95
69
  ],
70
  "optimizer_eps": 1e-08,
71
  "optimizer_weight_decay": 1e-10,
72
- "optimizer_grad_clip_norm": 10.0,
73
  "scheduler_warmup_steps": 1000,
74
  "scheduler_decay_steps": 30000,
75
- "scheduler_decay_lr": 2.5e-06,
76
- "add_image_special_tokens": false,
77
- "attention_mode": "cross_attn",
78
- "prefix_length": 0,
79
- "pad_language_to": "max_length",
80
- "num_expert_layers": 0,
81
- "num_vlm_layers": 16,
82
- "self_attn_every_n_layers": 2,
83
- "expert_width_multiplier": 0.75,
84
- "min_period": 0.004,
85
- "max_period": 4.0
86
  }
 
41
  "device": "cuda",
42
  "use_amp": false,
43
  "push_to_hub": true,
44
+ "repo_id": "mizutoukotori/pi0_so101",
45
  "private": null,
46
  "tags": null,
47
  "license": null,
 
50
  "max_state_dim": 32,
51
  "max_action_dim": 32,
52
  "resize_imgs_with_padding": [
53
+ 224,
54
+ 224
55
  ],
56
  "empty_cameras": 0,
57
  "adapt_to_pi_aloha": false,
58
  "use_delta_joint_actions_aloha": false,
59
  "tokenizer_max_length": 48,
60
+ "proj_width": 1024,
61
  "num_steps": 10,
62
  "use_cache": true,
63
+ "attention_implementation": "eager",
64
  "freeze_vision_encoder": true,
65
+ "train_expert_only": false,
66
  "train_state_proj": true,
67
+ "optimizer_lr": 2.5e-05,
68
  "optimizer_betas": [
69
  0.9,
70
  0.95
71
  ],
72
  "optimizer_eps": 1e-08,
73
  "optimizer_weight_decay": 1e-10,
 
74
  "scheduler_warmup_steps": 1000,
75
  "scheduler_decay_steps": 30000,
76
+ "scheduler_decay_lr": 2.5e-06
 
 
 
 
 
 
 
 
 
 
77
  }