{
  "repo_id": "jackvial/so101_pickplace_recap_merged_v2",
  "output_dir": "/home/jack/code/lerobot/outputs/recap_value_0",
  "labels_csv_path": null,
  "root": "/home/jack/.cache/huggingface/lerobot",
  "revision": null,
  "episodes": null,
  "epochs": 2,
  "batch_size": 4,
  "gradient_accumulation_steps": 4,
  "num_workers": 4,
  "learning_rate": 0.0001,
  "weight_decay": 0.0001,
  "warmup_ratio": 0.05,
  "max_grad_norm": 1.0,
  "use_class_weights": true,
  "val_split_ratio": 0.1,
  "seed": 42,
  "device": "auto",
  "max_train_steps_per_epoch": null,
  "max_val_steps_per_epoch": null,
  "log_every_n_steps": 100,
  "validate_every_n_train_steps": 50,
  "plot_every_n_train_steps": 200,
  "max_val_steps_per_step_validation": 20,
  "val_plot_num_episodes": 4,
  "val_plot_num_frames": 8,
  "val_plot_every_n_epochs": 1,
  "c_fail": 500.0,
  "num_value_bins": 50,
  "text_tokenizer_name": "google/paligemma-3b-pt-224",
  "tokenizer_max_length": 96,
  "image_size": 224,
  "max_state_dim": 32,
  "paligemma_variant": "gemma_2b",
  "model_precision": "bfloat16",
  "freeze_vision_encoder": true,
  "freeze_backbone": true,
  "num_unfrozen_backbone_layers": 3,
  "num_vlm_layers": 10,
  "dropout": 0.1,
  "pretrained_path": "lerobot/pi05_base",
  "wandb_project": null,
  "wandb_entity": null,
  "wandb_run_name": null
}